diff --git a/azurerm/internal/services/datafactory/data_factory.go b/azurerm/internal/services/datafactory/data_factory.go index dc01cf8f1bc0..2401fd362d45 100644 --- a/azurerm/internal/services/datafactory/data_factory.go +++ b/azurerm/internal/services/datafactory/data_factory.go @@ -283,33 +283,56 @@ func expandDataFactoryDatasetLocation(d *pluginsdk.ResourceData) datafactory.Bas return expandDataFactoryDatasetAzureBlobFSLocation(d) } + if _, ok := d.GetOk("sftp_server_location"); ok { + return expandDataFactoryDatasetSFTPServerLocation(d) + } + return nil } +func expandDataFactoryDatasetSFTPServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { + sftpServerLocations := d.Get("sftp_server_location").([]interface{}) + if len(sftpServerLocations) == 0 || sftpServerLocations[0] == nil { + return nil + } + + props := sftpServerLocations[0].(map[string]interface{}) + + sftpServerLocation := datafactory.SftpLocation{ + FolderPath: props["path"].(string), + FileName: props["filename"].(string), + } + return sftpServerLocation +} + func expandDataFactoryDatasetHttpServerLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("http_server_location").([]interface{})[0].(map[string]interface{}) - relativeUrl := props["relative_url"].(string) - path := props["path"].(string) - filename := props["filename"].(string) + httpServerLocations := d.Get("http_server_location").([]interface{}) + if len(httpServerLocations) == 0 || httpServerLocations[0] == nil { + return nil + } + + props := httpServerLocations[0].(map[string]interface{}) httpServerLocation := datafactory.HTTPServerLocation{ - RelativeURL: relativeUrl, - FolderPath: path, - FileName: filename, + RelativeURL: props["relative_url"].(string), + FolderPath: props["path"].(string), + FileName: props["filename"].(string), } return httpServerLocation } func expandDataFactoryDatasetAzureBlobStorageLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation { - props := d.Get("azure_blob_storage_location").([]interface{})[0].(map[string]interface{}) - container := props["container"].(string) - path := props["path"].(string) - filename := props["filename"].(string) + azureBlobStorageLocations := d.Get("azure_blob_storage_location").([]interface{}) + if len(azureBlobStorageLocations) == 0 || azureBlobStorageLocations[0] == nil { + return nil + } + + props := azureBlobStorageLocations[0].(map[string]interface{}) blobStorageLocation := datafactory.AzureBlobStorageLocation{ - Container: container, - FolderPath: path, - FileName: filename, + Container: props["container"].(string), + FolderPath: props["path"].(string), + FileName: props["filename"].(string), } return blobStorageLocation } @@ -319,6 +342,7 @@ func expandDataFactoryDatasetAzureBlobFSLocation(d *pluginsdk.ResourceData) data if len(azureBlobFsLocations) == 0 || azureBlobFsLocations[0] == nil { return nil } + props := azureBlobFsLocations[0].(map[string]interface{}) blobStorageLocation := datafactory.AzureBlobFSLocation{ @@ -403,3 +427,95 @@ func flattenDataFactoryDatasetAzureBlobFSLocation(input *datafactory.AzureBlobFS }, } } +func flattenDataFactoryDatasetSFTPLocation(input *datafactory.SftpLocation) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if input.FolderPath != nil { + result["path"] = input.FolderPath + } + if input.FileName != nil { + result["filename"] = input.FileName + } + + return []interface{}{result} +} + +func flattenDataFactoryDatasetCompression(input 
datafactory.BasicDatasetCompression) []interface{} { + if input == nil { + return nil + } + result := make(map[string]interface{}) + + if compression, ok := input.AsDatasetBZip2Compression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetDeflateCompression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetGZipCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + if compression, ok := input.AsDatasetTarCompression(); ok { + result["type"] = compression.Type + } + if compression, ok := input.AsDatasetTarGZipCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + if compression, ok := input.AsDatasetZipDeflateCompression(); ok { + result["type"] = compression.Type + result["level"] = compression.Level + } + + return []interface{}{result} +} + +func expandDataFactoryDatasetCompression(d *pluginsdk.ResourceData) datafactory.BasicDatasetCompression { + compression := d.Get("compression").([]interface{}) + if len(compression) == 0 || compression[0] == nil { + return nil + } + props := compression[0].(map[string]interface{}) + level := props["level"].(string) + compressionType := props["type"].(string) + + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeBZip2 { + return datafactory.DatasetBZip2Compression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeDeflate { + return datafactory.DatasetDeflateCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeGZip { + return datafactory.DatasetGZipCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeTar { + return datafactory.DatasetTarCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeTarGZip { + return datafactory.DatasetTarGZipCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + if datafactory.TypeBasicDatasetCompression(compressionType) == datafactory.TypeBasicDatasetCompressionTypeZipDeflate { + return datafactory.DatasetZipDeflateCompression{ + Type: datafactory.TypeBasicDatasetCompression(compressionType), + Level: level, + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index 21bdfadef3f5..8abbefb5404e 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -40,6 +40,7 @@ func resourceDataFactoryDatasetAzureBlob() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go 
b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go new file mode 100644 index 000000000000..16de9701ec6b --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -0,0 +1,388 @@ +package datafactory + +import ( + "fmt" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { + return &pluginsdk.Resource{ + Create: resourceDataFactoryDatasetBinaryCreateUpdate, + Read: resourceDataFactoryDatasetBinaryRead, + Update: resourceDataFactoryDatasetBinaryCreateUpdate, + Delete: resourceDataFactoryDatasetBinaryDelete, + Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + _, err := parse.DataSetID(id) + return err + }), + + Timeouts: &pluginsdk.ResourceTimeout{ + Create: pluginsdk.DefaultTimeout(30 * time.Minute), + Read: pluginsdk.DefaultTimeout(5 * time.Minute), + Update: pluginsdk.DefaultTimeout(30 * time.Minute), + Delete: pluginsdk.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.LinkedServiceDatasetName, + }, + + // TODO: replace with `data_factory_id` in 3.0 + "data_factory_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryName(), + }, + + // There's a bug in the Azure API where this is returned in lower-case + // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "linked_service_name": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + // Binary Dataset Specific Field + "http_server_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"azure_blob_storage_location", "sftp_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "relative_url": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "path": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "sftp_server_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"azure_blob_storage_location", "http_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "path": { + Type: pluginsdk.TypeString, + Required: true, + 
ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + }, + }, + }, + }, + + // Binary Dataset Specific Field + "azure_blob_storage_location": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"http_server_location", "sftp_server_location"}, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "container": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "path": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + "filename": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + }, + + "parameters": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "annotations": { + Type: pluginsdk.TypeList, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "folder": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "additional_properties": { + Type: pluginsdk.TypeMap, + Optional: true, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + }, + }, + + "compression": { + Type: pluginsdk.TypeList, + MaxItems: 1, + Optional: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + // TarGZip, GZip, ZipDeflate + "level": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + "Optimal", + "Fastest", + }, false), + }, + // SFTP Specific field + "type": { + Type: pluginsdk.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datafactory.TypeBasicDatasetCompressionTypeBZip2), + string(datafactory.TypeBasicDatasetCompressionTypeDeflate), + string(datafactory.TypeBasicDatasetCompressionTypeGZip), + string(datafactory.TypeBasicDatasetCompressionTypeTar), + string(datafactory.TypeBasicDatasetCompressionTypeTarGZip), + string(datafactory.TypeBasicDatasetCompressionTypeZipDeflate), + }, false), + }, + }, + }, + }, + }, + } +} + +func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + + if d.IsNewResource() { + existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for presence of existing Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_dataset_binary", *existing.ID) + } + } + + location := expandDataFactoryDatasetLocation(d) + if location == nil { + return fmt.Errorf("one of `http_server_location`, `azure_blob_storage_location` or `sftp_server_location`, must be specified to create a DataFactory Binary Dataset") + } + + binaryDatasetProperties := 
datafactory.BinaryDatasetTypeProperties{ + Location: location, + } + + if _, ok := d.GetOk("compression"); ok { + binaryDatasetProperties.Compression = expandDataFactoryDatasetCompression(d) + } + + binaryTableset := datafactory.BinaryDataset{ + BinaryDatasetTypeProperties: &binaryDatasetProperties, + Description: utils.String(d.Get("description").(string)), + LinkedServiceName: &datafactory.LinkedServiceReference{ + ReferenceName: utils.String(d.Get("linked_service_name").(string)), + Type: utils.String("LinkedServiceReference"), + }, + } + + if v, ok := d.GetOk("folder"); ok { + name := v.(string) + binaryTableset.Folder = &datafactory.DatasetFolder{ + Name: &name, + } + } + + if v, ok := d.GetOk("parameters"); ok { + binaryTableset.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + binaryTableset.Annotations = &annotations + } + + if v, ok := d.GetOk("additional_properties"); ok { + binaryTableset.AdditionalProperties = v.(map[string]interface{}) + } + + datasetType := string(datafactory.TypeBasicDatasetTypeBinary) + dataset := datafactory.DatasetResource{ + Properties: &binaryTableset, + Type: &datasetType, + } + + if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { + return fmt.Errorf("creating/updating Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + } + + if _, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, ""); err != nil { + return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + } + + d.SetId(id.ID()) + + return resourceDataFactoryDatasetBinaryRead(d, meta) +} + +func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + return nil + } + + return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + } + + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("data_factory_name", id.FactoryName) + + binaryTable, ok := resp.Properties.AsBinaryDataset() + if !ok { + return fmt.Errorf("classifiying Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", id.Name, id.FactoryName, id.ResourceGroup, datafactory.TypeBasicDatasetTypeBinary, *resp.Type) + } + + d.Set("additional_properties", binaryTable.AdditionalProperties) + + if binaryTable.Description != nil { + d.Set("description", binaryTable.Description) + } + + if err := d.Set("parameters", flattenDataFactoryParameters(binaryTable.Parameters)); err != nil { + return fmt.Errorf("setting `parameters`: %+v", err) + } + + annotations := flattenDataFactoryAnnotations(binaryTable.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("setting `annotations`: %+v", err) + } + + if linkedService := binaryTable.LinkedServiceName; linkedService != nil { + if 
linkedService.ReferenceName != nil { + d.Set("linked_service_name", linkedService.ReferenceName) + } + } + + if properties := binaryTable.BinaryDatasetTypeProperties; properties != nil { + if httpServerLocation, ok := properties.Location.AsHTTPServerLocation(); ok { + if err := d.Set("http_server_location", flattenDataFactoryDatasetHTTPServerLocation(httpServerLocation)); err != nil { + return fmt.Errorf("setting `http_server_location` for Data Factory Binary Dataset %s", err) + } + } + if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok { + if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil { + return fmt.Errorf("setting `azure_blob_storage_location` for Data Factory Binary Dataset %s", err) + } + } + if sftpLocation, ok := properties.Location.AsSftpLocation(); ok { + if err := d.Set("sftp_server_location", flattenDataFactoryDatasetSFTPLocation(sftpLocation)); err != nil { + return fmt.Errorf("setting `sftp_server_location` for Data Factory Binary Dataset %s", err) + } + } + + compression := flattenDataFactoryDatasetCompression(properties.Compression) + if err := d.Set("compression", compression); err != nil { + return fmt.Errorf("setting `compression`: %+v", err) + } + } + + if folder := binaryTable.Folder; folder != nil && folder.Name != nil { + d.Set("folder", folder.Name) + } + + return nil +} + +func resourceDataFactoryDatasetBinaryDelete(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.DatasetClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataSetID(d.Id()) + if err != nil { + return err + } + + response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("deleting Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go new file mode 100644 index 000000000000..b7a49693f7f1 --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_dataset_binary_resource_test.go @@ -0,0 +1,298 @@ +package datafactory_test + +import ( + "context" + "fmt" + "testing" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/pluginsdk" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +type DatasetBinaryResource struct { +} + +func TestAccDataFactoryDatasetBinary_blob(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_http(t *testing.T) { + data := 
acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.http(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_sftp(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.sftp(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataFactoryDatasetBinary_sftpComplete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.sftp_complete(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func (t DatasetBinaryResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + id, err := azure.ParseAzureResourceID(state.ID) + if err != nil { + return nil, err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["datasets"] + + resp, err := clients.DataFactory.DatasetClient.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return nil, fmt.Errorf("reading Data Factory Dataset Binary (%s): %+v", id, err) + } + + return utils.Bool(resp.ID != nil), nil +} + +func (DatasetBinaryResource) blob(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + path = "foo/bar/" + filename = "foo.txt" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) http(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = 
"acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_web" "test" { + name = "acctestlsweb%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Anonymous" + url = "https://www.bing.com" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_web.test.name + + http_server_location { + relative_url = "/fizz/buzz/" + path = "foo/bar/" + filename = "foo.txt" + } + + compression { + type = "GZip" + level = "Optimal" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) sftp(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_sftp" "test" { + name = "acctestlssftp%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Basic" + host = "http://www.bing.com" + port = 22 + username = "foo" + password = "bar" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_sftp.test.name + + sftp_server_location { + path = "/test/" + filename = "**" + } +} + +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func (DatasetBinaryResource) sftp_complete(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_linked_service_sftp" "test" { + name = "acctestlssftp%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + authentication_type = "Basic" + host = "http://www.bing.com" + port = 22 + username = "foo" + password = "bar" +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + linked_service_name = azurerm_data_factory_linked_service_sftp.test.name + + sftp_server_location { + path = "/test/" + filename = "**" + } + + compression { + type = "GZip" + level = "Fastest" + } + + description = "test description 2" + annotations = ["test1", "test2"] + folder = "testFolder" + + parameters = { + foo = "test1" + bar = "test2" + buzz = "test3" + } + + additional_properties = { + foo = "test1" + } +} + +`, 
data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go index 9fef936d74a0..b1b186114d27 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetCosmosDbSQLAPI() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index 40bec2bcaa11..2f1e628aeec9 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -42,6 +42,7 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go index d1f47d300d49..48bcb9bc8331 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_http_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetHTTP() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go index 7a1d7e98bb60..2f9ebfe05af5 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go index 177c7d50b683..c1b35e24fdc0 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_mysql_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetMySQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go index fd024f8a4908..74085ba6c9b1 100644 --- 
a/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_parquet_resource.go @@ -42,6 +42,7 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go index 8f77b0e0021e..17d0060181f4 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_postgresql_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetPostgreSQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go index 569576c0d930..11d6fc49c4ab 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_snowflake_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetSnowflake() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go index 602bf8e318f3..35132475b5e3 100644 --- a/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go +++ b/azurerm/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go @@ -41,6 +41,7 @@ func resourceDataFactoryDatasetSQLServerTable() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, + // TODO: replace with `data_factory_id` in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, diff --git a/azurerm/internal/services/datafactory/registration.go b/azurerm/internal/services/datafactory/registration.go index 1bac8a2cc6be..7b43b643191b 100644 --- a/azurerm/internal/services/datafactory/registration.go +++ b/azurerm/internal/services/datafactory/registration.go @@ -30,6 +30,7 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ "azurerm_data_factory": resourceDataFactory(), "azurerm_data_factory_dataset_azure_blob": resourceDataFactoryDatasetAzureBlob(), + "azurerm_data_factory_dataset_binary": resourceDataFactoryDatasetBinary(), "azurerm_data_factory_dataset_cosmosdb_sqlapi": resourceDataFactoryDatasetCosmosDbSQLAPI(), "azurerm_data_factory_dataset_delimited_text": resourceDataFactoryDatasetDelimitedText(), "azurerm_data_factory_dataset_http": resourceDataFactoryDatasetHTTP(), diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown new file mode 100644 index 000000000000..bba122551f1f --- /dev/null +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -0,0 +1,143 @@
+---
+subcategory: "Data Factory"
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_data_factory_dataset_binary"
+description: |-
+  Manages a Data Factory Binary Dataset inside an Azure Data Factory.
+---
+
+# azurerm_data_factory_dataset_binary
+
+Manages a Data Factory Binary Dataset inside an Azure Data Factory.
+
+## Example Usage
+
+```hcl
+resource "azurerm_resource_group" "example" {
+  name     = "example"
+  location = "West Europe"
+}
+
+resource "azurerm_data_factory" "example" {
+  name                = "example"
+  location            = azurerm_resource_group.example.location
+  resource_group_name = azurerm_resource_group.example.name
+}
+
+resource "azurerm_data_factory_linked_service_sftp" "example" {
+  name                = "example"
+  resource_group_name = azurerm_resource_group.example.name
+  data_factory_name   = azurerm_data_factory.example.name
+
+  authentication_type = "Basic"
+  host                = "http://www.bing.com"
+  port                = 22
+  username            = "foo"
+  password            = "bar"
+}
+
+resource "azurerm_data_factory_dataset_binary" "example" {
+  name                = "example"
+  resource_group_name = azurerm_resource_group.example.name
+  data_factory_name   = azurerm_data_factory.example.name
+  linked_service_name = azurerm_data_factory_linked_service_sftp.example.name
+
+  sftp_server_location {
+    path     = "/test/"
+    filename = "**"
+  }
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+* `name` - (Required) Specifies the name of the Data Factory Binary Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions.
+
+* `data_factory_name` - (Required) The name of the Data Factory in which to create the Binary Dataset. Changing this forces a new resource to be created.
+
+* `linked_service_name` - (Required) The name of the Data Factory Linked Service with which the Binary Dataset is associated.
+
+* `resource_group_name` - (Required) The name of the Resource Group where the Data Factory should exist. Changing this forces a new Data Factory Binary Dataset to be created.
+
+---
+
+* `additional_properties` - (Optional) A map of additional properties to associate with the Data Factory Binary Dataset.
+
+* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Binary Dataset.
+
+* `compression` - (Optional) A `compression` block as defined below.
+
+* `description` - (Optional) The description for the Data Factory Dataset.
+
+* `folder` - (Optional) The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
+
+* `parameters` - (Optional) Specifies a map of parameters to associate with the Data Factory Binary Dataset.
+
+The following locations are supported for a Binary Dataset. Exactly one of them must be specified:
+
+* `http_server_location` - (Optional) A `http_server_location` block as defined below.
+
+* `azure_blob_storage_location` - (Optional) A `azure_blob_storage_location` block as defined below.
+
+* `sftp_server_location` - (Optional) A `sftp_server_location` block as defined below.
+
+---
+
+A `compression` block supports the following:
+
+* `type` - (Required) The type of compression used during transport. Possible values are `BZip2`, `Deflate`, `GZip`, `Tar`, `TarGZip` and `ZipDeflate`.
+
+* `level` - (Optional) The level of compression. Possible values are `Fastest` and `Optimal`.
+
+---
+
+A `http_server_location` block supports the following:
+
+* `relative_url` - (Required) The base URL to the web server hosting the file.
+
+* `path` - (Required) The folder path to the file on the web server.
+
+* `filename` - (Required) The filename of the file on the web server.
+
+---
+
+A `azure_blob_storage_location` block supports the following:
+
+* `container` - (Required) The container on the Azure Blob Storage Account hosting the file.
+
+* `path` - (Required) The folder path to the file in the blob container.
+
+* `filename` - (Required) The filename of the file in the blob container.
+
+---
+
+A `sftp_server_location` block supports the following:
+
+* `path` - (Required) The folder path to the file on the SFTP server.
+
+* `filename` - (Required) The filename of the file on the SFTP server.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Data Factory Dataset.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 30 minutes) Used when creating the Data Factory Dataset.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Data Factory Dataset.
+* `update` - (Defaults to 30 minutes) Used when updating the Data Factory Dataset.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Data Factory Dataset.
+
+## Import
+
+Data Factory Binary Datasets can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_data_factory_dataset_binary.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
+```
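-> **Note:** The docs example above only shows an SFTP location. As an additional illustration, below is a minimal configuration sketch of a Binary Dataset that reads from a web server with compression enabled, modelled on the `http_server_location` and `compression` blocks documented above and on the acceptance test configuration in this change; the resource names and the linked service URL are illustrative assumptions, not values mandated by the provider.

```hcl
# Illustrative sketch only — names and URL are assumptions based on this PR's acceptance tests.
resource "azurerm_data_factory_linked_service_web" "example" {
  name                = "exampleweb"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  authentication_type = "Anonymous"
  url                 = "https://www.bing.com"
}

resource "azurerm_data_factory_dataset_binary" "http" {
  name                = "examplehttp"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  linked_service_name = azurerm_data_factory_linked_service_web.example.name

  # Exactly one of http_server_location, azure_blob_storage_location or
  # sftp_server_location may be set on a Binary Dataset.
  http_server_location {
    relative_url = "/fizz/buzz/"
    path         = "foo/bar/"
    filename     = "foo.txt"
  }

  # Optional compression block; `level` only applies to GZip, TarGZip and ZipDeflate.
  compression {
    type  = "GZip"
    level = "Optimal"
  }
}
```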