From 1b0fe5bf1c5930610032d090e53f137d46ea801a Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Fri, 25 Jun 2021 15:05:26 +0200 Subject: [PATCH 1/8] azurerm_data_factory_dataset_binary: New resource --- .../services/datafactory/data_factory.go | 104 +++++ .../data_factory_dataset_binary_resource.go | 410 ++++++++++++++++++ ...ta_factory_dataset_binary_resource_test.go | 298 +++++++++++++ .../services/datafactory/registration.go | 1 + .../data_factory_dataset_binary.html.markdown | 143 ++++++ 5 files changed, 956 insertions(+) create mode 100644 azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go create mode 100644 azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go create mode 100644 website/docs/r/data_factory_dataset_binary.html.markdown diff --git a/azurerm/internal/services/datafactory/data_factory.go b/azurerm/internal/services/datafactory/data_factory.go index dc01cf8f1bc0..4a4a1013fb6c 100644 --- a/azurerm/internal/services/datafactory/data_factory.go +++ b/azurerm/internal/services/datafactory/data_factory.go @@ -283,9 +283,25 @@ func expandDataFactoryDatasetLocation(d *pluginsdk.ResourceData) datafactory.Bas return expandDataFactoryDatasetAzureBlobFSLocation(d) } + if _, ok := d.GetOk("sftp_server_location"); ok { + return expandDataFactoryDatasetSFTPServerLocation(d) + } + return nil } +func expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { + props := d.Get("sftp_server_location").([]interface{})[0].(map[string]interface{}) + path := props["path"].(string) + filename := props["filename"].(string) + + sftpServerLocation := datafactory.SftpLocation{ + FolderPath: path, + FileName: filename, + } + return sftpServerLocation +} + func expandDataFactoryDatasetHttpServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { props := d.Get("http_server_location").([]interface{})[0].(map[string]interface{}) relativeUrl := 
props["relative_url"].(string) @@ -403,3 +419,91 @@ func flattenDataFactoryDatasetAzureBlobFSLocation(input *datafactory.AzureBlobFS }, } } +func flattenDataFactoryDatasetSFTPLocation(input *datafactory.SftpLocation) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if input.FolderPath != nil { + result["path"] = input.FolderPath + } + if input.FileName != nil { + result["filename"] = input.FileName + } + + return []interface{}{result} +} + +func flattenDataFactoryDatasetCompression(input datafactory.BasicDatasetCompression) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if compression, ok := input.AsDatasetBZip2Compression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetDeflateCompression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetGZipCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + if compression, ok := input.AsDatasetTarCompression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetTarGZipCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + if compression, ok := input.AsDatasetZipDeflateCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + + return []interface{}{result} +} + +func expandDataFactoryDatasetCompression(d *pluginsdk.ResourceData) datafactory.BasicDatasetCompression { + props := d.Get("compression").([]interface{})[0].(map[string]interface{}) + level := props["level"].(string) + compressionType := props["type"].(string) + + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeBZip2 { + return datafactory.DatasetBZip2Compression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if 
datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeDeflate { + return datafactory.DatasetDeflateCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeGZip { + return datafactory.DatasetGZipCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeTar { + return datafactory.DatasetTarCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeTarGZip { + return datafactory.DatasetTarGZipCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeZipDeflate { + return datafactory.DatasetZipDeflateCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go new file mode 100644 index 000000000000..86436d04f7a6 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -0,0 +1,410 @@ +package datafactory + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { + return &pluginsdk.Resource{ + Create: resourceDataFactoryDatasetBinaryCreateUpdate, + Read: resourceDataFactoryDatasetBinaryRead, + Update: resourceDataFactoryDatasetBinaryCreateUpdate, + Delete: resourceDataFactoryDatasetBinaryDelete, + + // TODO: replace this with an importer which validates the ID during import + Importer: pluginsdk.DefaultImporter(), + + Timeouts: &pluginsdk.ResourceTimeout{ + Create: pluginsdk.DefaultTimeout(30 * time.Minute), + Read: pluginsdk.DefaultTimeout(5 * time.Minute), + Update: pluginsdk.DefaultTimeout(30 * time.Minute), + Delete: pluginsdk.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LinkedServiceDatasetName, + }, + + "data_factory_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryName(), + }, + + // There's a bug in the Azure API where this is returned in lower-case + // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "linked_service_name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + // Binary Dataset Specific Field + "http_server_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + // ConflictsWith: []string{"sftp_server_location", 
"file_server_location", "s3_location", "azure_blob_storage_location"}, + ConflictsWith: []string{"azure_blob_storage_location", "sftp_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "relative_url": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "path": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "sftp_server_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + ConflictsWith: []string{"azure_blob_storage_location", "http_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "path": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + // Binary Dataset Specific Field + "azure_blob_storage_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + ConflictsWith: []string{"http_server_location", "sftp_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "container": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "path": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "parameters": { + Type: pluginsdk.TypeMap, + Optional: 
true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "annotations": { + Type: pluginsdk.TypeList, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "folder": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "additional_properties": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "compression": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + // TarGZip, GZip, ZipDeflate + "level": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + "Optimal", + "Fastest", + }, false), + }, + // SFTP Specific field + "type": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datafactory.TypeBasicDatasetCompressionTypeBZip2), + string(datafactory.TypeBasicDatasetCompressionTypeDeflate), + string(datafactory.TypeBasicDatasetCompressionTypeGZip), + string(datafactory.TypeBasicDatasetCompressionTypeTar), + string(datafactory.TypeBasicDatasetCompressionTypeTarGZip), + string(datafactory.TypeBasicDatasetCompressionTypeZipDeflate), + }, false), + }, + }, + }, + }, + }, + } +} + +func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + dataFactoryName := d.Get("data_factory_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != 
nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_dataset_binary", *existing.ID) + } + } + + location := expandDataFactoryDatasetLocation(d) + if location == nil { + return fmt.Errorf("one of `http_server_location`, `azure_blob_storage_location` or `sftp_server_location`, must be specified to create a DataFactory Binary Dataset") + } + + binaryDatasetProperties := datafactory.BinaryDatasetTypeProperties{ + Location: location, + } + + if _, ok := d.GetOk("compression"); ok { + binaryDatasetProperties.Compression = expandDataFactoryDatasetCompression(d) + } + + linkedServiceName := d.Get("linked_service_name").(string) + linkedServiceType := "LinkedServiceReference" + linkedService := &datafactory.LinkedServiceReference{ + ReferenceName: &linkedServiceName, + Type: &linkedServiceType, + } + + description := d.Get("description").(string) + // TODO + binaryTableset := datafactory.BinaryDataset{ + BinaryDatasetTypeProperties: &binaryDatasetProperties, + LinkedServiceName: linkedService, + Description: &description, + } + + if v, ok := d.GetOk("folder"); ok { + name := v.(string) + binaryTableset.Folder = &datafactory.DatasetFolder{ + Name: &name, + } + } + + if v, ok := d.GetOk("parameters"); ok { + binaryTableset.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + binaryTableset.Annotations = &annotations + } + + if v, ok := d.GetOk("additional_properties"); ok { + binaryTableset.AdditionalProperties = v.(map[string]interface{}) + } + + datasetType := string(datafactory.TypeBasicDatasetTypeBinary) + dataset := datafactory.DatasetResource{ + Properties: &binaryTableset, + Type: 
&datasetType, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil { + return fmt.Errorf("creating/updating Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + if resp.ID == nil { + return fmt.Errorf("cannot read Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + d.SetId(*resp.ID) + + return resourceDataFactoryDatasetBinaryRead(d, meta) +} + +func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["datasets"] + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", resourceGroup) + d.Set("data_factory_name", dataFactoryName) + + binaryTable, ok := resp.Properties.AsBinaryDataset() + if !ok { + return fmt.Errorf("classifiying Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeBasicDatasetTypeBinary, *resp.Type) + } + + 
d.Set("additional_properties", binaryTable.AdditionalProperties) + + if binaryTable.Description != nil { + d.Set("description", binaryTable.Description) + } + + parameters := flattenDataFactoryParameters(binaryTable.Parameters) + if err := d.Set("parameters", parameters); err != nil { + return fmt.Errorf("setting `parameters`: %+v", err) + } + + annotations := flattenDataFactoryAnnotations(binaryTable.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("setting `annotations`: %+v", err) + } + + if linkedService := binaryTable.LinkedServiceName; linkedService != nil { + if linkedService.ReferenceName != nil { + d.Set("linked_service_name", linkedService.ReferenceName) + } + } + + if properties := binaryTable.BinaryDatasetTypeProperties; properties != nil { + if httpServerLocation, ok := properties.Location.AsHTTPServerLocation(); ok { + if err := d.Set("http_server_location", flattenDataFactoryDatasetHTTPServerLocation(httpServerLocation)); err != nil { + return fmt.Errorf("setting `http_server_location` for Data Factory Binary Dataset %s", err) + } + } + if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok { + if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { + return fmt.Errorf("setting `azure_blob_storage_location` for Data Factory Binary Dataset %s", err) + } + } + if sftpLocation, ok := properties.Location.AsSftpLocation(); ok { + if err := d.Set("sftp_server_location", flattenDataFactoryDatasetSFTPLocation(sftpLocation)); err != nil { + return fmt.Errorf("setting `sftp_server_location` for Data Factory Binary Dataset %s", err) + } + } + + compression := flattenDataFactoryDatasetCompression(properties.Compression) + if err := d.Set("compression", compression); err != nil { + return fmt.Errorf("setting `compression`: %+v", err) + } + } + + if folder := binaryTable.Folder; folder != nil { + if 
folder.Name != nil { + d.Set("folder", folder.Name) + } + } + + return nil +} + +func resourceDataFactoryDatasetBinaryDelete(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["datasets"] + + response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("deleting Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go new file mode 100644 index 000000000000..b718f53e49e2 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go @@ -0,0 +1,298 @@ +package datafactory_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DatasetBinaryResource struct { +} + +func TestAccDataFactoryDatasetBinary_blob(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := 
DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_http(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.http(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_sftp(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.sftp(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_SftpComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.sftp_complete(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DatasetBinaryResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["datasets"] + + resp, err := clients.DataFactory.DatasetClient.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return nil, fmt.Errorf("reading Data Factory Dataset Binary (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} 
+ +func (DatasetBinaryResource) blob(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + path = "foo/bar/" + filename = "foo.txt" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) http(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = 
azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_web" "test" { + name = "acctestlsweb%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Anonymous" + url = "https://www.bing.com" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_web.test.name + + http_server_location { + relative_url = "/fizz/buzz/" + path = "foo/bar/" + filename = "foo.txt" + } + + compression { + type = "GZip" + level = "Optimal" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) sftp(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_sftp" "test" { + name = "acctestlssftp%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Basic" + host = "http://www.bing.com" + port = 22 + username = "foo" + password = "bar" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_sftp.test.name + + sftp_server_location { + path = "/test/" + filename = "**" + } +} + +`, data.RandomInteger, 
data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) sftp_complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_sftp" "test" { + name = "acctestlssftp%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Basic" + host = "http://www.bing.com" + port = 22 + username = "foo" + password = "bar" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_sftp.test.name + + sftp_server_location { + path = "/test/" + filename = "**" + } + + compression { + type = "GZip" + level = "Fastest" + } + + description = "test description 2" + annotations = ["test1", "test2"] + folder = "testFolder" + + parameters = { + foo = "test1" + bar = "test2" + buzz = "test3" + } + + additional_properties = { + foo = "test1" + } +} + +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/registration.go b/azurerm/internal/services/datafactory/registration.go index 1bac8a2cc6be..7b43b643191b 100644 --- a/azurerm/internal/services/datafactory/registration.go +++ b/azurerm/internal/services/datafactory/registration.go @@ -30,6 +30,7 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ "azurerm_data_factory": 
resourceDataFactory(), "azurerm_data_factory_dataset_azure_blob": resourceDataFactoryDatasetAzureBlob(), + "azurerm_data_factory_dataset_binary": resourceDataFactoryDatasetBinary(), "azurerm_data_factory_dataset_cosmosdb_sqlapi": resourceDataFactoryDatasetCosmosDbSQLAPI(), "azurerm_data_factory_dataset_delimited_text": resourceDataFactoryDatasetDelimitedText(), "azurerm_data_factory_dataset_http": resourceDataFactoryDatasetHTTP(), diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown new file mode 100644 index 000000000000..62bd25781d57 --- /dev/null +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "Data Factory" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_factory_dataset_binary" +description: |- + Manages a Data Factory Binary Dataset inside an Azure Data Factory. +--- + +# azurerm_data_factory_dataset_binary + +Manages a Data Factory Binary Dataset inside an Azure Data Factory. 
+ +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example" + location = "West Europe" +} + +resource "azurerm_data_factory" "example" { + name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name +} + +resource "azurerm_data_factory_linked_service_sftp" "example" { + name = "example" + resource_group_name = azurerm_resource_group.example.name + data_factory_name = azurerm_data_factory.example.name + + authentication_type = "Basic" + host = "http://www.bing.com" + port = 22 + username = "foo" + password = "bar" +} + +resource "azurerm_data_factory_dataset_binary" "example" { + name = "example" + resource_group_name = azurerm_resource_group.example.name + data_factory_name = azurerm_data_factory.example.name + linked_service_name = azurerm_data_factory_linked_service_sftp.example.name + + sftp_server_location { + path = "/test/" + filename = "**" + } +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Data Factory Binary Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `data_factory_name` - (Required) The Data Factory name in which to associate the Binary Dataset with. Changing this forces a new resource. + +* `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Binary Dataset with. + +* `resource_group_name` - (Required) The name of the Resource Group where the Data Factory should exist. Changing this forces a new Data Factory Binary Dataset to be created. + +--- + +* `additional_properties` - (Optional) A map of additional properties to associate with the Data Factory Binary Dataset. 
+ +* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Binary Dataset. + +* `compression` - (Optional) A `compression` block as defined below. + +* `description` - (Optional) The description for the Data Factory Dataset. + +* `folder` - (Optional) The folder that this Dataset is in. If not specified, the Dataset will appear at the root level. + +* `parameters` - (Optional) Specifies a list of parameters to associate with the Data Factory Binary Dataset. + +The following supported locations for a Binary Dataset. One of these should be specified: + +* `http_server_location` - (Optional) A `http_server_location` block as defined below. + +* `azure_blob_storage_location` - (Optional) A `azure_blob_storage_location` block as defined below. + +* `sftp_server_location` - (Optional) A `sftp_server_location` block as defined below. +--- + +A `compression` block supports the following: + +* `type` - (Required) The type of compression used during transport. + +* `level` - (Optional) The level of compression. Possible values are `Fastest` and `Optimal`. + +--- + +A `http_server_location` block supports the following: + +* `relative_url` - (Required) The base URL to the web server hosting the file. + +* `path` - (Required) The folder path to the file on the web server. + +* `filename` - (Required) The filename of the file on the web server. + +--- + +A `azure_blob_storage_location` block supports the following: + +* `container` - (Required) The container on the Azure Blob Storage Account hosting the file. + +* `path` - (Required) The folder path to the file on the web server. + +* `filename` - (Required) The filename of the file on the web server. + +--- + +A `sftp_server_location` block supports the following: + +* `path` - (Required) The folder path to the file on the SFTP server. + +* `filename` - (Required) The filename of the file on the SFTP server. 
+ + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Data Factory Dataset. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Data Factory. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory. +* `update` - (Defaults to 30 minutes) Used when updating the Data Factory. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory. + +## Import + +Data Factorys can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_factory_dataset_binary.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example +``` \ No newline at end of file From c34400de3a953dc67364472fbe21bd07bcdf8049 Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Fri, 25 Jun 2021 15:36:36 +0200 Subject: [PATCH 2/8] Fix docs --- website/docs/r/data_factory_dataset_binary.html.markdown | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown index 62bd25781d57..364626d7791a 100644 --- a/website/docs/r/data_factory_dataset_binary.html.markdown +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -43,8 +43,8 @@ resource "azurerm_data_factory_dataset_binary" "example" { linked_service_name = azurerm_data_factory_linked_service_sftp.example.name sftp_server_location { - path = "/test/" - filename = "**" + path = "/test/" + filename = "**" } } ``` @@ -136,8 +136,8 @@ The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/d ## Import -Data Factorys can be imported using the `resource id`, e.g. 
+Data Factory Binary Datasets can be imported using the `resource id`, e.g.
b/azurerm/internal/services/datafactory/data_factory.go @@ -291,7 +291,11 @@ func expandDataFactoryDatasetLocation(d *pluginsdk.ResourceData) datafactory.Bas } func expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("sftp_server_location").([]interface{})[0].(map[string]interface{}) + sftpServerLocations := d.Get("sftp_server_location").([]interface{}) + if len(sftpServerLocations) == 0 || sftpServerLocations[0] == nil { + return nil + } + props := sftpServerLocations[0].(map[string]interface{}) path := props["path"].(string) filename := props["filename"].(string) @@ -303,7 +307,11 @@ func expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) dataf } func expandDataFactoryDatasetHttpServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("http_server_location").([]interface{})[0].(map[string]interface{}) + httpServerLocations := d.Get("http_server_location").([]interface{}) + if len(httpServerLocations) == 0 || httpServerLocations[0] == nil { + return nil + } + props := httpServerLocations[0].(map[string]interface{}) relativeUrl := props["relative_url"].(string) path := props["path"].(string) filename := props["filename"].(string) @@ -317,7 +325,11 @@ func expandDataFactoryDatasetHttpServerLocation(d *pluginsdk.ResourceData) dataf } func expandDataFactoryDatasetAzureBlobStorageLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("azure_blob_storage_location").([]interface{})[0].(map[string]interface{}) + azureBlobStorageLocations := d.Get("azure_blob_storage_location").([]interface{}) + if len(azureBlobStorageLocations) == 0 || azureBlobStorageLocations[0] == nil { + return nil + } + props := azureBlobStorageLocations[0].(map[string]interface{}) container := props["container"].(string) path := props["path"].(string) filename := props["filename"].(string) @@ -467,7 +479,11 @@ func 
flattenDataFactoryDatasetCompression(input datafactory.BasicDatasetCompress } func expandDataFactoryDatasetCompression(d *pluginsdk.ResourceData) datafactory.BasicDatasetCompression { - props := d.Get("compression").([]interface{})[0].(map[string]interface{}) + compression := d.Get("compression").([]interface{}) + if len(compression) == 0 || compression[0] == nil { + return nil + } + props := compression[0].(map[string]interface{}) level := props["level"].(string) compressionType := props["type"].(string) diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go index f30ad00c4919..3f9384ae62b6 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -8,6 +8,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/validate" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation" @@ -21,9 +22,10 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { Read: resourceDataFactoryDatasetBinaryRead, Update: resourceDataFactoryDatasetBinaryCreateUpdate, Delete: resourceDataFactoryDatasetBinaryDelete, - - // TODO: replace this with an importer which validates the ID during import - Importer: pluginsdk.DefaultImporter(), + Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + _, err := 
parse.DataSetID(id) + return err + }), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -207,18 +209,17 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).DataFactory.DatasetClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - name := d.Get("name").(string) - dataFactoryName := d.Get("data_factory_name").(string) - resourceGroup := d.Get("resource_group_name").(string) + id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + return fmt.Errorf("checking for presence of existing Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } } @@ -248,7 +249,6 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met } description := d.Get("description").(string) - // TODO binaryTableset := datafactory.BinaryDataset{ BinaryDatasetTypeProperties: &binaryDatasetProperties, LinkedServiceName: linkedService, @@ -281,20 +281,16 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met Type: &datasetType, } - if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data 
Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { + return fmt.Errorf("creating/updating Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } - resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + _, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) - } - - if resp.ID == nil { - return fmt.Errorf("cannot read Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } - d.SetId(*resp.ID) + d.SetId(id.ID()) return resourceDataFactoryDatasetBinaryRead(d, meta) } @@ -304,31 +300,28 @@ func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interf ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DataSetID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - dataFactoryName := id.Path["factories"] - name := id.Path["datasets"] - resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { d.SetId("") return nil } - return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + return fmt.Errorf("retrieving Data Factory Dataset Binary %q 
(Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } - d.Set("name", resp.Name) - d.Set("resource_group_name", resourceGroup) - d.Set("data_factory_name", dataFactoryName) + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("data_factory_name", id.FactoryName) binaryTable, ok := resp.Properties.AsBinaryDataset() if !ok { - return fmt.Errorf("classifiying Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeBasicDatasetTypeBinary, *resp.Type) + return fmt.Errorf("classifiying Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", id.Name, id.FactoryName, id.ResourceGroup, datafactory.TypeBasicDatasetTypeBinary, *resp.Type) } d.Set("additional_properties", binaryTable.AdditionalProperties) @@ -390,18 +383,15 @@ func resourceDataFactoryDatasetBinaryDelete(d *pluginsdk.ResourceData, meta inte ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := azure.ParseAzureResourceID(d.Id()) + id, err := parse.DataSetID(d.Id()) if err != nil { return err } - resourceGroup := id.ResourceGroup - dataFactoryName := id.Path["factories"] - name := id.Path["datasets"] - response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name) + response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + return fmt.Errorf("deleting Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } } diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown index 
364626d7791a..bba122551f1f 100644 --- a/website/docs/r/data_factory_dataset_binary.html.markdown +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -129,10 +129,10 @@ In addition to the Arguments listed above - the following Attributes are exporte The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: -* `create` - (Defaults to 30 minutes) Used when creating the Data Factory. -* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory. -* `update` - (Defaults to 30 minutes) Used when updating the Data Factory. -* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory. +* `create` - (Defaults to 30 minutes) Used when creating the Data Factory Dataset. +* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory Dataset. +* `update` - (Defaults to 30 minutes) Used when updating the Data Factory Dataset. +* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory Dataset. 
## Import From 3f9046caf975426f46df9f07c0bcf7442871c58f Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Fri, 9 Jul 2021 09:01:47 +0200 Subject: [PATCH 5/8] Add TODO for 3.0 --- .../datafactory/data_factory_dataset_azure_blob_resource.go | 1 + .../services/datafactory/data_factory_dataset_binary_resource.go | 1 + .../datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go | 1 + .../datafactory/data_factory_dataset_delimited_text_resource.go | 1 + .../services/datafactory/data_factory_dataset_http_resource.go | 1 + .../services/datafactory/data_factory_dataset_json_resource.go | 1 + .../services/datafactory/data_factory_dataset_mysql_resource.go | 1 + .../datafactory/data_factory_dataset_parquet_resource.go | 1 + .../datafactory/data_factory_dataset_postgresql_resource.go | 1 + .../datafactory/data_factory_dataset_snowflake_resource.go | 1 + .../data_factory_dataset_sql_server_table_resource.go | 1 + 11 files changed, 11 insertions(+) diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index 21bdfadef3f5..8abbefb5404e 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -40,6 +40,7 @@ func resourceDataFactoryDatasetAzureBlob() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go index 3f9384ae62b6..fd43beddfb6d 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -42,6 +42,7 
@@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go index 9fef936d74a0..b1b186114d27 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetCosmosDbSQLAPI() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index 40bec2bcaa11..2f1e628aeec9 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -42,6 +42,7 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go index d1f47d300d49..48bcb9bc8331 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetHTTP() 
*pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go index 7a1d7e98bb60..2f9ebfe05af5 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go index 177c7d50b683..c1b35e24fdc0 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetMySQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go index fd024f8a4908..74085ba6c9b1 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go @@ -42,6 +42,7 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 
"data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go index 8f77b0e0021e..17d0060181f4 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetPostgreSQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go index 569576c0d930..11d6fc49c4ab 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetSnowflake() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go index 602bf8e318f3..35132475b5e3 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetSQLServerTable() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: 
pluginsdk.TypeString, Required: true, From d5e87024117ac14b013cfe9dfd88c715b4fa82e6 Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Thu, 15 Jul 2021 22:30:36 +0200 Subject: [PATCH 6/8] Fix comments 2 --- .../services/datafactory/data_factory.go | 27 +++++------- .../data_factory_dataset_binary_resource.go | 42 +++++++------------ ...ta_factory_dataset_binary_resource_test.go | 2 +- 3 files changed, 28 insertions(+), 43 deletions(-) diff --git a/azurerm/internal/services/datafactory/data_factory.go b/azurerm/internal/services/datafactory/data_factory.go index b006a156d1b4..04bb8a319b9a 100644 --- a/azurerm/internal/services/datafactory/data_factory.go +++ b/azurerm/internal/services/datafactory/data_factory.go @@ -296,12 +296,10 @@ func expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) dataf return nil } props := sftpServerLocations[0].(map[string]interface{}) - path := props["path"].(string) - filename := props["filename"].(string) sftpServerLocation := datafactory.SftpLocation{ - FolderPath: path, - FileName: filename, + FolderPath: props["path"].(string), + FileName: props["filename"].(string), } return sftpServerLocation } @@ -311,15 +309,13 @@ func expandDataFactoryDatasetHttpServerLocation(d *pluginsdk.ResourceData) dataf if len(httpServerLocations) == 0 || httpServerLocations[0] == nil { return nil } + props := httpServerLocations[0].(map[string]interface{}) - relativeUrl := props["relative_url"].(string) - path := props["path"].(string) - filename := props["filename"].(string) httpServerLocation := datafactory.HTTPServerLocation{ - RelativeURL: relativeUrl, - FolderPath: path, - FileName: filename, + RelativeURL: props["relative_url"].(string), + FolderPath: props["path"].(string), + FileName: props["filename"].(string), } return httpServerLocation } @@ -329,15 +325,13 @@ func expandDataFactoryDatasetAzureBlobStorageLocation(d *pluginsdk.ResourceData) if len(azureBlobStorageLocations) == 0 || azureBlobStorageLocations[0] == nil { 
return nil } + props := azureBlobStorageLocations[0].(map[string]interface{}) - container := props["container"].(string) - path := props["path"].(string) - filename := props["filename"].(string) blobStorageLocation := datafactory.AzureBlobStorageLocation{ - Container: container, - FolderPath: path, - FileName: filename, + Container: props["container"].(string), + FolderPath: props["path"].(string), + FileName: props["filename"].(string), } return blobStorageLocation } @@ -347,6 +341,7 @@ func expandDataFactoryDatasetAzureBlobFSLocation(d *pluginsdk.ResourceData) data if len(azureBlobFsLocations) == 0 || azureBlobFsLocations[0] == nil { return nil } + props := azureBlobFsLocations[0].(map[string]interface{}) blobStorageLocation := datafactory.AzureBlobFSLocation{ diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go index fd43beddfb6d..59a429a78b9f 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -62,10 +62,9 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { // Binary Dataset Specific Field "http_server_location": { - Type: pluginsdk.TypeList, - MaxItems: 1, - Optional: true, - // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, ConflictsWith: []string{"azure_blob_storage_location", "sftp_server_location"}, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ @@ -89,10 +88,9 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { }, "sftp_server_location": { - Type: pluginsdk.TypeList, - MaxItems: 1, - Optional: true, - // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + Type: 
pluginsdk.TypeList, + MaxItems: 1, + Optional: true, ConflictsWith: []string{"azure_blob_storage_location", "http_server_location"}, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ @@ -111,10 +109,9 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { // Binary Dataset Specific Field "azure_blob_storage_location": { - Type: pluginsdk.TypeList, - MaxItems: 1, - Optional: true, - // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"}, + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, ConflictsWith: []string{"http_server_location", "sftp_server_location"}, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ @@ -242,18 +239,13 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met binaryDatasetProperties.Compression = expandDataFactoryDatasetCompression(d) } - linkedServiceName := d.Get("linked_service_name").(string) - linkedServiceType := "LinkedServiceReference" - linkedService := &datafactory.LinkedServiceReference{ - ReferenceName: &linkedServiceName, - Type: &linkedServiceType, - } - - description := d.Get("description").(string) binaryTableset := datafactory.BinaryDataset{ BinaryDatasetTypeProperties: &binaryDatasetProperties, - LinkedServiceName: linkedService, - Description: &description, + Description: utils.String(d.Get("description").(string)), + LinkedServiceName: &datafactory.LinkedServiceReference{ + ReferenceName: utils.String(d.Get("linked_service_name").(string)), + Type: utils.String("LinkedServiceReference"), + }, } if v, ok := d.GetOk("folder"); ok { @@ -286,8 +278,7 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met return fmt.Errorf("creating/updating Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } - _, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") - if err != nil { + 
if _, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, ""); err != nil { return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) } @@ -331,8 +322,7 @@ func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interf d.Set("description", binaryTable.Description) } - parameters := flattenDataFactoryParameters(binaryTable.Parameters) - if err := d.Set("parameters", parameters); err != nil { + if err := d.Set("parameters", flattenDataFactoryParameters(binaryTable.Parameters)); err != nil { return fmt.Errorf("setting `parameters`: %+v", err) } diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go index b718f53e49e2..b7a49693f7f1 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go @@ -61,7 +61,7 @@ func TestAccDataFactoryDatasetBinary_sftp(t *testing.T) { }) } -func TestAccDataFactoryDatasetBinary_SftpComplete(t *testing.T) { +func TestAccDataFactoryDatasetBinary_sftpComplete(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") r := DatasetBinaryResource{} From 7a1b4a4d409ea298c00529e7ad8d53681fcd7765 Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Thu, 15 Jul 2021 22:37:28 +0200 Subject: [PATCH 7/8] Forgot 1 --- azurerm/internal/services/datafactory/data_factory.go | 1 + 1 file changed, 1 insertion(+) diff --git a/azurerm/internal/services/datafactory/data_factory.go b/azurerm/internal/services/datafactory/data_factory.go index 04bb8a319b9a..2401fd362d45 100644 --- a/azurerm/internal/services/datafactory/data_factory.go +++ b/azurerm/internal/services/datafactory/data_factory.go @@ -295,6 +295,7 @@ func 
expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) dataf if len(sftpServerLocations) == 0 || sftpServerLocations[0] == nil { return nil } + props := sftpServerLocations[0].(map[string]interface{}) sftpServerLocation := datafactory.SftpLocation{ From 04ef0635848d8335b1a1e7d82c6e6c1f63433f20 Mon Sep 17 00:00:00 2001 From: Aris van Ommeren Date: Thu, 15 Jul 2021 22:40:55 +0200 Subject: [PATCH 8/8] Extra fix --- .../datafactory/data_factory_dataset_binary_resource.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go index 59a429a78b9f..16de9701ec6b 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -360,10 +360,8 @@ func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interf } } - if folder := binaryTable.Folder; folder != nil { - if folder.Name != nil { - d.Set("folder", folder.Name) - } + if folder := binaryTable.Folder; folder != nil && folder.Name != nil { + d.Set("folder", folder.Name) } return nil