diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index a347f5672913..66f533c78d4c 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -269,7 +269,7 @@ func resourceDataFactoryDatasetAzureBlobRead(d *schema.ResourceData, meta interf azureBlobTable, ok := resp.Properties.AsAzureBlobDataset() if !ok { - return fmt.Errorf("Error classifiying Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeRelationalTable, *resp.Type) + return fmt.Errorf("Error classifying Data Factory Dataset Azure Blob %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeRelationalTable, *resp.Type) } d.Set("additional_properties", azureBlobTable.AdditionalProperties) diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource.go index 4630d7b5c71c..1fa7562ad585 100644 --- a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_blob_storage_resource.go @@ -199,7 +199,7 @@ func resourceDataFactoryLinkedServiceBlobStorageRead(d *schema.ResourceData, met blobStorage, ok := resp.Properties.AsAzureBlobStorageLinkedService() if !ok { - return fmt.Errorf("Error classifiying Data Factory Linked Service BlobStorage %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeWeb, *resp.Type) + return fmt.Errorf("Error classifiying Data Factory Linked 
Service BlobStorage %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeAzureBlobStorage, *resp.Type) } d.Set("additional_properties", blobStorage.AdditionalProperties) diff --git a/azurerm/internal/services/datafactory/data_factory_linked_service_azure_table_storage_resource.go b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_table_storage_resource.go new file mode 100644 index 000000000000..776e762738cf --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_linked_service_azure_table_storage_resource.go @@ -0,0 +1,248 @@ +package datafactory + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataFactoryLinkedServiceAzureTableStorage() *schema.Resource { + return &schema.Resource{ + Create: resourceDataFactoryLinkedServiceTableStorageCreateUpdate, + Read: resourceDataFactoryLinkedServiceTableStorageRead, + Update: resourceDataFactoryLinkedServiceTableStorageCreateUpdate, + Delete: resourceDataFactoryLinkedServiceTableStorageDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + 
Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName, + }, + + "data_factory_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryName(), + }, + + // There's a bug in the Azure API where this is returned in lower-case + // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "connection_string": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "integration_runtime_name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "parameters": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "additional_properties": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + } +} + +func resourceDataFactoryLinkedServiceTableStorageCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + dataFactoryName := d.Get("data_factory_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + if 
!utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Data Factory Linked Service TableStorage Anonymous %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_linked_service_azure_table_storage", *existing.ID) + } + } + + tableStorageLinkedService := &datafactory.AzureTableStorageLinkedService{ + Description: utils.String(d.Get("description").(string)), + AzureStorageLinkedServiceTypeProperties: &datafactory.AzureStorageLinkedServiceTypeProperties{ + ConnectionString: &datafactory.SecureString{ + Value: utils.String(d.Get("connection_string").(string)), + Type: datafactory.TypeSecureString, + }, + }, + Type: datafactory.TypeAzureTableStorage, + } + + if v, ok := d.GetOk("parameters"); ok { + tableStorageLinkedService.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) + } + + if v, ok := d.GetOk("integration_runtime_name"); ok { + tableStorageLinkedService.ConnectVia = expandDataFactoryLinkedServiceIntegrationRuntime(v.(string)) + } + + if v, ok := d.GetOk("additional_properties"); ok { + tableStorageLinkedService.AdditionalProperties = v.(map[string]interface{}) + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + tableStorageLinkedService.Annotations = &annotations + } + + linkedService := datafactory.LinkedServiceResource{ + Properties: tableStorageLinkedService, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, linkedService, ""); err != nil { + return fmt.Errorf("Error creating/updating Data Factory Linked Service TableStorage %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Error retrieving Data Factory Linked Service 
TableStorage %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + if resp.ID == nil { + return fmt.Errorf("Cannot read Data Factory Linked Service TableStorage %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + d.SetId(*resp.ID) + + return resourceDataFactoryLinkedServiceTableStorageRead(d, meta) +} + +func resourceDataFactoryLinkedServiceTableStorageRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["linkedservices"] + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("Error retrieving Data Factory Linked Service TableStorage %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resourceGroup) + d.Set("data_factory_name", dataFactoryName) + + tableStorage, ok := resp.Properties.AsAzureTableStorageLinkedService() + if !ok { + return fmt.Errorf("Error classifying Data Factory Linked Service TableStorage %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeAzureTableStorage, *resp.Type) + } + + d.Set("additional_properties", tableStorage.AdditionalProperties) + d.Set("description", tableStorage.Description) + + annotations := flattenDataFactoryAnnotations(tableStorage.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("Error setting `annotations` for Data Factory Linked Service Azure Table 
Storage %q (Data Factory %q) / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + + parameters := flattenDataFactoryParameters(tableStorage.Parameters) + if err := d.Set("parameters", parameters); err != nil { + return fmt.Errorf("Error setting `parameters`: %+v", err) + } + + if connectVia := tableStorage.ConnectVia; connectVia != nil { + if connectVia.ReferenceName != nil { + d.Set("integration_runtime_name", connectVia.ReferenceName) + } + } + + return nil +} + +func resourceDataFactoryLinkedServiceTableStorageDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.LinkedServiceClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["linkedservices"] + + response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("Error deleting Data Factory Linked Service TableStorage %q (Data Factory %q / Resource Group %q): %+v", name, dataFactoryName, resourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/registration.go b/azurerm/internal/services/datafactory/registration.go index 353da35be5bd..33f2666ada1c 100644 --- a/azurerm/internal/services/datafactory/registration.go +++ b/azurerm/internal/services/datafactory/registration.go @@ -40,6 +40,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource { "azurerm_data_factory_integration_runtime_managed": resourceDataFactoryIntegrationRuntimeManaged(), "azurerm_data_factory_integration_runtime_self_hosted": resourceDataFactoryIntegrationRuntimeSelfHosted(), "azurerm_data_factory_linked_service_azure_blob_storage": resourceDataFactoryLinkedServiceAzureBlobStorage(), + 
"azurerm_data_factory_linked_service_azure_table_storage": resourceDataFactoryLinkedServiceAzureTableStorage(), "azurerm_data_factory_linked_service_azure_file_storage": resourceDataFactoryLinkedServiceAzureFileStorage(), "azurerm_data_factory_linked_service_azure_sql_database": resourceDataFactoryLinkedServiceAzureSQLDatabase(), "azurerm_data_factory_linked_service_azure_function": resourceDataFactoryLinkedServiceAzureFunction(), diff --git a/azurerm/internal/services/datafactory/tests/data_factory_linked_service_azure_table_storage_resource_test.go b/azurerm/internal/services/datafactory/tests/data_factory_linked_service_azure_table_storage_resource_test.go new file mode 100644 index 000000000000..ec833f124a49 --- /dev/null +++ b/azurerm/internal/services/datafactory/tests/data_factory_linked_service_azure_table_storage_resource_test.go @@ -0,0 +1,183 @@ +package datafactory_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type LinkedServiceAzureTableStorageResource struct { +} + +func TestAccDataFactoryLinkedServiceAzureTableStorage_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_azure_table_storage", "test") + r := LinkedServiceAzureTableStorageResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.basic(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + 
data.ImportStep("connection_string"), + }) +} + +func TestAccDataFactoryLinkedServiceAzureTableStorage_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_linked_service_azure_table_storage", "test") + r := LinkedServiceAzureTableStorageResource{} + + data.ResourceTest(t, r, []resource.TestStep{ + { + Config: r.update1(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("parameters.%").HasValue("2"), + check.That(data.ResourceName).Key("annotations.#").HasValue("3"), + check.That(data.ResourceName).Key("additional_properties.%").HasValue("2"), + check.That(data.ResourceName).Key("description").HasValue("test description"), + ), + }, + data.ImportStep("connection_string"), + { + Config: r.update2(data), + Check: resource.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + check.That(data.ResourceName).Key("parameters.%").HasValue("3"), + check.That(data.ResourceName).Key("annotations.#").HasValue("2"), + check.That(data.ResourceName).Key("additional_properties.%").HasValue("1"), + check.That(data.ResourceName).Key("description").HasValue("test description 2"), + ), + }, + data.ImportStep("connection_string"), + }) +} + +func (t LinkedServiceAzureTableStorageResource) Exists(ctx context.Context, clients *clients.Client, state *terraform.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["linkedservices"] + + resp, err := clients.DataFactory.LinkedServiceClient.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return nil, fmt.Errorf("reading Data Factory Linked Service Azure Table Storage (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (LinkedServiceAzureTableStorageResource) basic(data acceptance.TestData) string { + 
return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_azure_table_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=bar" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (LinkedServiceAzureTableStorageResource) update1(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_azure_table_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = "DefaultEndpointsProtocol=https;AccountName=foo2;AccountKey=bar" + annotations = ["test1", "test2", "test3"] + description = "test description" + + parameters = { + foO = "test1" + bar = "test2" + } + + additional_properties = { + foo = "test1" + bar = "test2" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} + +func (LinkedServiceAzureTableStorageResource) update2(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + 
+resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_azure_table_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + connection_string = "DefaultEndpointsProtocol=https;AccountName=foo3;AccountKey=bar" + annotations = ["Test1", "Test2"] + description = "test description 2" + + parameters = { + foo = "Test1" + bar = "test2" + buzz = "test3" + } + + additional_properties = { + foo = "test1" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) +} diff --git a/website/docs/r/data_factory_linked_service_azure_blob_storage.html.markdown b/website/docs/r/data_factory_linked_service_azure_blob_storage.html.markdown index 8c1a071915ee..049d42ace5ef 100644 --- a/website/docs/r/data_factory_linked_service_azure_blob_storage.html.markdown +++ b/website/docs/r/data_factory_linked_service_azure_blob_storage.html.markdown @@ -8,7 +8,7 @@ description: |- # azurerm_data_factory_linked_service_azure_blob_storage -Manages a Linked Service (connection) between a SFTP Server and Azure Data Factory. +Manages a Linked Service (connection) between an Azure Blob Storage Account and Azure Data Factory. ~> **Note:** All arguments including the client secret will be stored in the raw state as plain-text. [Read more about sensitive data in state](/docs/state/sensitive-data.html). 
diff --git a/website/docs/r/data_factory_linked_service_azure_table_storage.html.markdown b/website/docs/r/data_factory_linked_service_azure_table_storage.html.markdown new file mode 100644 index 000000000000..a11d0026bacd --- /dev/null +++ b/website/docs/r/data_factory_linked_service_azure_table_storage.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Data Factory" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_factory_linked_service_azure_table_storage" +description: |- + Manages a Linked Service (connection) between an Azure Table Storage and Azure Data Factory. +--- + +# azurerm_data_factory_linked_service_azure_table_storage + +Manages a Linked Service (connection) between an Azure Table Storage and Azure Data Factory. + +~> **Note:** All arguments including the client secret will be stored in the raw state as plain-text. [Read more about sensitive data in state](/docs/state/sensitive-data.html). + +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "northeurope" +} + +data "azurerm_storage_account" "example" { + name = "storageaccountname" + resource_group_name = azurerm_resource_group.example.name +} + +resource "azurerm_data_factory" "example" { + name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name +} + +resource "azurerm_data_factory_linked_service_azure_table_storage" "example" { + name = "example" + resource_group_name = azurerm_resource_group.example.name + data_factory_name = azurerm_data_factory.example.name + connection_string = data.azurerm_storage_account.example.primary_connection_string +} +``` + +## Argument Reference + +The following supported arguments are common across all Azure Data Factory Linked Services: + +* `name` - (Required) Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be globally unique. 
See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Linked Service. Changing this forces a new resource to be created. + +* `data_factory_name` - (Required) The Data Factory name in which to associate the Linked Service with. Changing this forces a new resource. + +* `description` - (Optional) The description for the Data Factory Linked Service. + +* `integration_runtime_name` - (Optional) The integration runtime reference to associate with the Data Factory Linked Service. + +* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Linked Service. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory Linked Service. + +* `additional_properties` - (Optional) A map of additional properties to associate with the Data Factory Linked Service. + +The following supported arguments are specific to Azure Table Storage Linked Service: + +* `connection_string` - (Required) The connection string to an Azure Storage Account. + +## Attributes Reference + +The following attributes are exported: + +* `id` - The ID of the Data Factory Linked Service. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Factory Linked Service. +* `update` - (Defaults to 30 minutes) Used when updating the Data Factory Linked Service. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory Linked Service. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory Linked Service. + +## Import + +Data Factory Linked Services can be imported using the `resource id`, e.g.
+ +```shell +terraform import azurerm_data_factory_linked_service_azure_table_storage.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example +```