diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
index 9b73ecdf44e8..385b2ebb1e97 100644
--- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
+++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
@@ -3,12 +3,12 @@ AutoRest installed successfully.
Commencing code generation
Generating CSharp code
Executing AutoRest command
-cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\GitHub\azure-sdk-for-net\sdk
-2019-09-27 21:50:57 UTC
+cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Source\adf0929\azure-sdk-for-net\sdk
+2019-09-29 05:46:14 UTC
Azure-rest-api-specs repository information
GitHub fork: Azure
Branch: master
-Commit: dce1ac7d8caf0f46741b41ec65f2d1710f0b8c2c
+Commit: 9aaa1b6b1ceddfe31e03867fdfa1cedebb99185d
AutoRest information
Requested version: latest
Bootstrapper version: autorest@2.0.4283
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLinkedService.cs
new file mode 100644
index 000000000000..dae5ead4808f
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLinkedService.cs
@@ -0,0 +1,114 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure File Storage linked service.
+ ///
+ [Newtonsoft.Json.JsonObject("AzureFileStorage")]
+ [Rest.Serialization.JsonTransformation]
+ public partial class AzureFileStorageLinkedService : LinkedService
+ {
+ ///
+ /// Initializes a new instance of the AzureFileStorageLinkedService
+ /// class.
+ ///
+ public AzureFileStorageLinkedService()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureFileStorageLinkedService
+ /// class.
+ ///
+ /// Host name of the server. Type: string (or
+ /// Expression with resultType string).
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// The integration runtime reference.
+ /// Linked service description.
+ /// Parameters for linked service.
+ /// List of tags that can be used for
+ /// describing the linked service.
+ /// User ID to logon the server. Type: string (or
+ /// Expression with resultType string).
+ /// Password to logon the server.
+ /// The encrypted credential used for
+ /// authentication. Credentials are encrypted using the integration
+ /// runtime credential manager. Type: string (or Expression with
+ /// resultType string).
+ public AzureFileStorageLinkedService(object host, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object userId = default(object), SecretBase password = default(SecretBase), object encryptedCredential = default(object))
+ : base(additionalProperties, connectVia, description, parameters, annotations)
+ {
+ Host = host;
+ UserId = userId;
+ Password = password;
+ EncryptedCredential = encryptedCredential;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets host name of the server. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.host")]
+ public object Host { get; set; }
+
+ ///
+ /// Gets or sets user ID to logon the server. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.userId")]
+ public object UserId { get; set; }
+
+ ///
+ /// Gets or sets password to logon the server.
+ ///
+ [JsonProperty(PropertyName = "typeProperties.password")]
+ public SecretBase Password { get; set; }
+
+ ///
+ /// Gets or sets the encrypted credential used for authentication.
+ /// Credentials are encrypted using the integration runtime credential
+ /// manager. Type: string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.encryptedCredential")]
+ public object EncryptedCredential { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ if (Host == null)
+ {
+ throw new ValidationException(ValidationRules.CannotBeNull, "Host");
+ }
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLocation.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLocation.cs
new file mode 100644
index 000000000000..6bdbafd55536
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageLocation.cs
@@ -0,0 +1,62 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// The location of file server dataset.
+ ///
+ public partial class AzureFileStorageLocation : DatasetLocation
+ {
+ ///
+ /// Initializes a new instance of the AzureFileStorageLocation class.
+ ///
+ public AzureFileStorageLocation()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureFileStorageLocation class.
+ ///
+ /// Type of dataset storage location.
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Specify the folder path of dataset. Type:
+ /// string (or Expression with resultType string)
+ /// Specify the file name of dataset. Type:
+ /// string (or Expression with resultType string).
+ public AzureFileStorageLocation(string type, IDictionary additionalProperties = default(IDictionary), object folderPath = default(object), object fileName = default(object))
+ : base(type, additionalProperties, folderPath, fileName)
+ {
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageReadSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageReadSettings.cs
new file mode 100644
index 000000000000..65f4f6d72d80
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageReadSettings.cs
@@ -0,0 +1,128 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure File Storage read settings.
+ ///
+ public partial class AzureFileStorageReadSettings : StoreReadSettings
+ {
+ ///
+ /// Initializes a new instance of the AzureFileStorageReadSettings
+ /// class.
+ ///
+ public AzureFileStorageReadSettings()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureFileStorageReadSettings
+ /// class.
+ ///
+ /// The read setting type.
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).
+ /// If true, files under the folder path will
+ /// be read recursively. Default is true. Type: boolean (or Expression
+ /// with resultType boolean).
+ /// Azure File Storage
+ /// wildcardFolderPath. Type: string (or Expression with resultType
+ /// string).
+ /// Azure File Storage wildcardFileName.
+ /// Type: string (or Expression with resultType string).
+ /// Indicates whether to enable
+ /// partition discovery.
+ /// The start of file's modified
+ /// datetime. Type: string (or Expression with resultType
+ /// string).
+ /// The end of file's modified
+ /// datetime. Type: string (or Expression with resultType
+ /// string).
+ public AzureFileStorageReadSettings(string type, IDictionary additionalProperties = default(IDictionary), object maxConcurrentConnections = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), bool? enablePartitionDiscovery = default(bool?), object modifiedDatetimeStart = default(object), object modifiedDatetimeEnd = default(object))
+ : base(type, additionalProperties, maxConcurrentConnections)
+ {
+ Recursive = recursive;
+ WildcardFolderPath = wildcardFolderPath;
+ WildcardFileName = wildcardFileName;
+ EnablePartitionDiscovery = enablePartitionDiscovery;
+ ModifiedDatetimeStart = modifiedDatetimeStart;
+ ModifiedDatetimeEnd = modifiedDatetimeEnd;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets if true, files under the folder path will be read
+ /// recursively. Default is true. Type: boolean (or Expression with
+ /// resultType boolean).
+ ///
+ [JsonProperty(PropertyName = "recursive")]
+ public object Recursive { get; set; }
+
+ ///
+ /// Gets or sets azure File Storage wildcardFolderPath. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "wildcardFolderPath")]
+ public object WildcardFolderPath { get; set; }
+
+ ///
+ /// Gets or sets azure File Storage wildcardFileName. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "wildcardFileName")]
+ public object WildcardFileName { get; set; }
+
+ ///
+ /// Gets or sets indicates whether to enable partition discovery.
+ ///
+ [JsonProperty(PropertyName = "enablePartitionDiscovery")]
+ public bool? EnablePartitionDiscovery { get; set; }
+
+ ///
+ /// Gets or sets the start of file's modified datetime. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "modifiedDatetimeStart")]
+ public object ModifiedDatetimeStart { get; set; }
+
+ ///
+ /// Gets or sets the end of file's modified datetime. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "modifiedDatetimeEnd")]
+ public object ModifiedDatetimeEnd { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureMySqlTableDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureMySqlTableDataset.cs
index e0f40babe577..cb579c2cab12 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureMySqlTableDataset.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureMySqlTableDataset.cs
@@ -53,10 +53,13 @@ public AzureMySqlTableDataset()
/// specified, Dataset will appear at the root level.
/// The Azure MySQL database table name. Type:
/// string (or Expression with resultType string).
- public AzureMySqlTableDataset(LinkedServiceReference linkedServiceName, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder), object tableName = default(object))
+ /// The name of Azure MySQL database table. Type:
+ /// string (or Expression with resultType string).
+ public AzureMySqlTableDataset(LinkedServiceReference linkedServiceName, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder), object tableName = default(object), object table = default(object))
: base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder)
{
TableName = tableName;
+ Table = table;
CustomInit();
}
@@ -72,6 +75,13 @@ public AzureMySqlTableDataset()
[JsonProperty(PropertyName = "typeProperties.tableName")]
public object TableName { get; set; }
+ ///
+ /// Gets or sets the name of Azure MySQL database table. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.table")]
+ public object Table { get; set; }
+
///
/// Validate the object.
///
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/BlobEventsTrigger.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/BlobEventsTrigger.cs
index 43a03a1eadb5..93d16ba6dc4b 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/BlobEventsTrigger.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/BlobEventsTrigger.cs
@@ -58,11 +58,14 @@ public BlobEventsTrigger()
/// 'december/boxes.csv' will only fire the trigger for blobs named
/// boxes in a december folder. At least one of these must be provided:
/// blobPathBeginsWith, blobPathEndsWith.
- public BlobEventsTrigger(IList events, string scope, IDictionary additionalProperties = default(IDictionary), string description = default(string), string runtimeState = default(string), IList annotations = default(IList), IList pipelines = default(IList), string blobPathBeginsWith = default(string), string blobPathEndsWith = default(string))
+ /// If set to true, blobs with zero
+ /// bytes will be ignored.
+ public BlobEventsTrigger(IList events, string scope, IDictionary additionalProperties = default(IDictionary), string description = default(string), string runtimeState = default(string), IList annotations = default(IList), IList pipelines = default(IList), string blobPathBeginsWith = default(string), string blobPathEndsWith = default(string), bool? ignoreEmptyBlobs = default(bool?))
: base(additionalProperties, description, runtimeState, annotations, pipelines)
{
BlobPathBeginsWith = blobPathBeginsWith;
BlobPathEndsWith = blobPathEndsWith;
+ IgnoreEmptyBlobs = ignoreEmptyBlobs;
Events = events;
Scope = scope;
CustomInit();
@@ -93,6 +96,12 @@ public BlobEventsTrigger()
[JsonProperty(PropertyName = "typeProperties.blobPathEndsWith")]
public string BlobPathEndsWith { get; set; }
+ ///
+ /// Gets or sets if set to true, blobs with zero bytes will be ignored.
+ ///
+ [JsonProperty(PropertyName = "typeProperties.ignoreEmptyBlobs")]
+ public bool? IgnoreEmptyBlobs { get; set; }
+
///
/// Gets or sets the type of events that cause this trigger to fire.
///
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLinkedService.cs
new file mode 100644
index 000000000000..16c37ac43783
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLinkedService.cs
@@ -0,0 +1,120 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Linked service for Google Cloud Storage.
+ ///
+ [Newtonsoft.Json.JsonObject("GoogleCloudStorage")]
+ [Rest.Serialization.JsonTransformation]
+ public partial class GoogleCloudStorageLinkedService : LinkedService
+ {
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageLinkedService
+ /// class.
+ ///
+ public GoogleCloudStorageLinkedService()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageLinkedService
+ /// class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// The integration runtime reference.
+ /// Linked service description.
+ /// Parameters for linked service.
+ /// List of tags that can be used for
+ /// describing the linked service.
+ /// The access key identifier of the Google
+ /// Cloud Storage Identity and Access Management (IAM) user. Type:
+ /// string (or Expression with resultType string).
+ /// The secret access key of the Google
+ /// Cloud Storage Identity and Access Management (IAM) user.
+ /// This value specifies the endpoint to
+ /// access with the Google Cloud Storage Connector. This is an optional
+ /// property; change it only if you want to try a different service
+ /// endpoint or want to switch between https and http. Type: string (or
+ /// Expression with resultType string).
+ /// The encrypted credential used for
+ /// authentication. Credentials are encrypted using the integration
+ /// runtime credential manager. Type: string (or Expression with
+ /// resultType string).
+ public GoogleCloudStorageLinkedService(IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object accessKeyId = default(object), SecretBase secretAccessKey = default(SecretBase), object serviceUrl = default(object), object encryptedCredential = default(object))
+ : base(additionalProperties, connectVia, description, parameters, annotations)
+ {
+ AccessKeyId = accessKeyId;
+ SecretAccessKey = secretAccessKey;
+ ServiceUrl = serviceUrl;
+ EncryptedCredential = encryptedCredential;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets the access key identifier of the Google Cloud Storage
+ /// Identity and Access Management (IAM) user. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.accessKeyId")]
+ public object AccessKeyId { get; set; }
+
+ ///
+ /// Gets or sets the secret access key of the Google Cloud Storage
+ /// Identity and Access Management (IAM) user.
+ ///
+ [JsonProperty(PropertyName = "typeProperties.secretAccessKey")]
+ public SecretBase SecretAccessKey { get; set; }
+
+ ///
+ /// Gets or sets this value specifies the endpoint to access with the
+ /// Google Cloud Storage Connector. This is an optional property;
+ /// change it only if you want to try a different service endpoint or
+ /// want to switch between https and http. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.serviceUrl")]
+ public object ServiceUrl { get; set; }
+
+ ///
+ /// Gets or sets the encrypted credential used for authentication.
+ /// Credentials are encrypted using the integration runtime credential
+ /// manager. Type: string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.encryptedCredential")]
+ public object EncryptedCredential { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLocation.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLocation.cs
new file mode 100644
index 000000000000..420acc46d586
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageLocation.cs
@@ -0,0 +1,84 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// The location of Google Cloud Storage dataset.
+ ///
+ public partial class GoogleCloudStorageLocation : DatasetLocation
+ {
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageLocation class.
+ ///
+ public GoogleCloudStorageLocation()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageLocation class.
+ ///
+ /// Type of dataset storage location.
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Specify the folder path of dataset. Type:
+ /// string (or Expression with resultType string)
+ /// Specify the file name of dataset. Type:
+ /// string (or Expression with resultType string).
+ /// Specify the bucketName of Google Cloud
+ /// Storage. Type: string (or Expression with resultType
+ /// string)
+ /// Specify the version of Google Cloud Storage.
+ /// Type: string (or Expression with resultType string).
+ public GoogleCloudStorageLocation(string type, IDictionary additionalProperties = default(IDictionary), object folderPath = default(object), object fileName = default(object), object bucketName = default(object), object version = default(object))
+ : base(type, additionalProperties, folderPath, fileName)
+ {
+ BucketName = bucketName;
+ Version = version;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets specify the bucketName of Google Cloud Storage. Type:
+ /// string (or Expression with resultType string)
+ ///
+ [JsonProperty(PropertyName = "bucketName")]
+ public object BucketName { get; set; }
+
+ ///
+ /// Gets or sets specify the version of Google Cloud Storage. Type:
+ /// string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "version")]
+ public object Version { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageReadSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageReadSettings.cs
new file mode 100644
index 000000000000..78f88bf23012
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/GoogleCloudStorageReadSettings.cs
@@ -0,0 +1,140 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Google Cloud Storage read settings.
+ ///
+ public partial class GoogleCloudStorageReadSettings : StoreReadSettings
+ {
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageReadSettings
+ /// class.
+ ///
+ public GoogleCloudStorageReadSettings()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the GoogleCloudStorageReadSettings
+ /// class.
+ ///
+ /// The read setting type.
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).
+ /// If true, files under the folder path will
+ /// be read recursively. Default is true. Type: boolean (or Expression
+ /// with resultType boolean).
+ /// Google Cloud Storage
+ /// wildcardFolderPath. Type: string (or Expression with resultType
+ /// string).
+ /// Google Cloud Storage
+ /// wildcardFileName. Type: string (or Expression with resultType
+ /// string).
+ /// The prefix filter for the Google Cloud Storage
+ /// object name. Type: string (or Expression with resultType
+ /// string).
+ /// Indicates whether to enable
+ /// partition discovery.
+ /// The start of file's modified
+ /// datetime. Type: string (or Expression with resultType
+ /// string).
+ /// The end of file's modified
+ /// datetime. Type: string (or Expression with resultType
+ /// string).
+ public GoogleCloudStorageReadSettings(string type, IDictionary additionalProperties = default(IDictionary), object maxConcurrentConnections = default(object), object recursive = default(object), object wildcardFolderPath = default(object), object wildcardFileName = default(object), object prefix = default(object), bool? enablePartitionDiscovery = default(bool?), object modifiedDatetimeStart = default(object), object modifiedDatetimeEnd = default(object))
+ : base(type, additionalProperties, maxConcurrentConnections)
+ {
+ Recursive = recursive;
+ WildcardFolderPath = wildcardFolderPath;
+ WildcardFileName = wildcardFileName;
+ Prefix = prefix;
+ EnablePartitionDiscovery = enablePartitionDiscovery;
+ ModifiedDatetimeStart = modifiedDatetimeStart;
+ ModifiedDatetimeEnd = modifiedDatetimeEnd;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets if true, files under the folder path will be read
+ /// recursively. Default is true. Type: boolean (or Expression with
+ /// resultType boolean).
+ ///
+ [JsonProperty(PropertyName = "recursive")]
+ public object Recursive { get; set; }
+
+ ///
+ /// Gets or sets google Cloud Storage wildcardFolderPath. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "wildcardFolderPath")]
+ public object WildcardFolderPath { get; set; }
+
+ ///
+ /// Gets or sets google Cloud Storage wildcardFileName. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "wildcardFileName")]
+ public object WildcardFileName { get; set; }
+
+ ///
+ /// Gets or sets the prefix filter for the Google Cloud Storage object
+ /// name. Type: string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "prefix")]
+ public object Prefix { get; set; }
+
+ ///
+ /// Gets or sets indicates whether to enable partition discovery.
+ ///
+ [JsonProperty(PropertyName = "enablePartitionDiscovery")]
+ public bool? EnablePartitionDiscovery { get; set; }
+
+ ///
+ /// Gets or sets the start of file's modified datetime. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "modifiedDatetimeStart")]
+ public object ModifiedDatetimeStart { get; set; }
+
+ ///
+ /// Gets or sets the end of file's modified datetime. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "modifiedDatetimeEnd")]
+ public object ModifiedDatetimeEnd { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
index 71c387f0d90e..97bdfb9adb28 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
@@ -41,10 +41,10 @@ public static IEnumerable> ApiInfo_DataFactoryMana
// BEGIN: Code Generation Metadata Section
public static readonly String AutoRestVersion = "latest";
public static readonly String AutoRestBootStrapperVersion = "autorest@2.0.4283";
- public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\\GitHub\\azure-sdk-for-net\\sdk";
+ public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\\Source\\adf0929\\azure-sdk-for-net\\sdk";
public static readonly String GithubForkName = "Azure";
public static readonly String GithubBranchName = "master";
- public static readonly String GithubCommidId = "dce1ac7d8caf0f46741b41ec65f2d1710f0b8c2c";
+ public static readonly String GithubCommidId = "9aaa1b6b1ceddfe31e03867fdfa1cedebb99185d";
public static readonly String CodeGenerationErrors = "";
public static readonly String GithubRepoName = "azure-rest-api-specs";
// END: Code Generation Metadata Section
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
index 951477535bee..01c3ecf819da 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
@@ -5,18 +5,20 @@
Microsoft.Azure.Management.DataFactory
Azure Data Factory V2 is the data integration platform that goes beyond Azure Data Factory V1's orchestration and batch-processing of time-series data, with a general purpose app model supporting modern data warehousing patterns and scenarios, lift-and-shift SSIS, and data-driven SaaS applications. Compose and manage reliable and secure data integration workflows at scale. Use native ADF data connectors and Integration Runtimes to move and transform cloud and on-premises data that can be unstructured, semi-structured, and structured with Hadoop, Azure Data Lake, Spark, SQL Server, Cosmos DB and many other data platforms.
- 4.2.0
+ 4.2.1
Microsoft.Azure.Management.DataFactory
Microsoft Azure resource management;Data Factory;ADF;
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
index 2f29c73889c8..f9e8895e3bf3 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
@@ -7,7 +7,7 @@
[assembly: AssemblyTitle("Microsoft Azure Data Factory Management Library")]
[assembly: AssemblyDescription("Provides management functionality for Microsoft Azure Data Factory Resources.")]
[assembly: AssemblyVersion("4.2.0.0")]
-[assembly: AssemblyFileVersion("4.2.0.0")]
+[assembly: AssemblyFileVersion("4.2.1.0")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Microsoft Azure .NET SDK")]
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/changelog.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/changelog.md
index 17e24aaa7973..4a83c4f5e21b 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/changelog.md
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/changelog.md
@@ -3,6 +3,11 @@
## Current version
### Feature Additions
+## Version 4.2.1
+### Feature Additions
+* Added dedicated linked service, dataset, and copy source support for Azure File Storage
+* Added dedicated linked service, dataset, and copy source support for Google Cloud Storage
+
## Version 4.2.0
### Feature Additions
* Added support for Data Flow:
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
index a1ec58c48425..0b35d1d23987 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
@@ -1768,5 +1768,79 @@ public class DatasetJsonSamples : JsonSampleCollection
}
}
";
+
+ [JsonSample]
+ public const string AzureMySqlTableWithTable = @"
+{
+ name: ""AzureMySqlTable"",
+ properties:
+ {
+ type: ""AzureMySqlTable"",
+ linkedServiceName:
+ {
+ referenceName : ""ls"",
+ type : ""LinkedServiceReference""
+ },
+ typeProperties:
+ {
+ table: ""$EncryptedString$MyEncryptedTable""
+ }
+ }
+}
+";
+
+ [JsonSample]
+ public const string AzureFileStorage = @"
+{
+ name: ""AzureFileStorageWithTextDataset"",
+ properties:
+ {
+ type: ""DelimitedText"",
+ linkedServiceName:
+ {
+ referenceName : ""ls"",
+ type : ""LinkedServiceReference""
+ },
+ typeProperties:
+ {
+ ""location"": {
+ ""type"": ""AzureFileStorageLocation"",
+ ""bucketName"": ""bucketname"",
+ ""folderPath"": ""folder/subfolder""
+ },
+ ""columnDelimiter"": "","",
+ ""quoteChar"": ""\"""",
+ ""firstRowAsHeader"": true,
+ ""compressionCodec"": ""gzip""
+ },
+ }
+}";
+
+ [JsonSample]
+ public const string GoogleCloudStorageDataset = @"
+{
+ name: ""GoogleCloudStorageWithTextDataset"",
+ properties:
+ {
+ type: ""DelimitedText"",
+ linkedServiceName:
+ {
+ referenceName : ""ls"",
+ type : ""LinkedServiceReference""
+ },
+ typeProperties:
+ {
+ ""location"": {
+ ""type"": ""GoogleCloudStorageLocation"",
+ ""bucketName"": ""bucketname"",
+ ""folderPath"": ""folder/subfolder""
+ },
+ ""columnDelimiter"": "","",
+ ""quoteChar"": ""\"""",
+ ""firstRowAsHeader"": true,
+ ""compressionCodec"": ""gzip""
+ },
+ }
+}";
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
index 67b4eaf3cd7e..55c8c75287b7 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
@@ -2280,5 +2280,44 @@ public class LinkedServiceJsonSamples : JsonSampleCollection
}
}
";
+
+ [JsonSample]
+ public const string CopyActivity_DelimitedText_GoogleCloudStorage = @"{
+ ""properties"": {
+ ""activities"": [
+ {
+ ""type"": ""Copy"",
+ ""typeProperties"": {
+ ""source"": {
+ ""type"": ""DelimitedTextSource"",
+ ""storeSettings"": {
+ ""type"": ""GoogleCloudStorageReadSettings"",
+ ""recursive"": true,
+ ""prefix"": ""fakeprefix"",
+ ""wildcardFileName"": ""*.csv"",
+ ""wildcardFolderPath"": ""A*"",
+ ""modifiedDatetimeStart"": ""2019-07-02T00:00:00.000Z"",
+ ""modifiedDatetimeEnd"": ""2019-07-03T00:00:00.000Z""
+ },
+ ""formatSettings"": {
+ ""type"": ""DelimitedTextReadSettings"",
+ ""skipLineCount"": 10,
+ ""additionalNullValues"": [ ""\\N"", ""NULL"" ]
+ }
+ },
+ ""sink"": {
+ ""type"": ""DelimitedTextSink"",
+ ""storeSettings"": {
+ ""type"": ""AzureDataLakeStoreWriteSettings"",
+ ""maxConcurrentConnections"": 3,
+ ""copyBehavior"": ""PreserveHierarchy""
+ },
+ ""formatSettings"": {
+ ""type"": ""DelimitedTextWriteSettings"",
+ ""quoteAllText"": true,
+ ""fileExtension"": "".csv""
+ }
+ }
+ },
+ ""inputs"": [
+ {
+ ""referenceName"": ""exampleDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ],
+ ""outputs"": [
+ {
+ ""referenceName"": ""exampleDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ],
+ ""name"": ""ExampleCopyActivity""
+ }
+ ]
+ }
+}";
+
+ [JsonSample]
+ public const string CopyActivity_DelimitedText_AzureFileStorage = @"{
+ ""properties"": {
+ ""activities"": [
+ {
+ ""type"": ""Copy"",
+ ""typeProperties"": {
+ ""source"": {
+ ""type"": ""DelimitedTextSource"",
+ ""storeSettings"": {
+ ""type"": ""AzureFileStorageReadSettings"",
+ ""recursive"": true,
+ ""wildcardFileName"": ""*.csv"",
+ ""wildcardFolderPath"": ""A*"",
+ ""modifiedDatetimeStart"": ""2019-07-02T00:00:00.000Z"",
+ ""modifiedDatetimeEnd"": ""2019-07-03T00:00:00.000Z"",
+ ""enablePartitionDiscovery"": true
+ },
+ ""formatSettings"": {
+ ""type"": ""DelimitedTextReadSettings"",
+ ""skipLineCount"": 10,
+ ""additionalNullValues"": [ ""\\N"", ""NULL"" ]
+ }
+ },
+ ""sink"": {
+ ""type"": ""DelimitedTextSink"",
+ ""storeSettings"": {
+ ""type"": ""AzureDataLakeStoreWriteSettings"",
+ ""maxConcurrentConnections"": 3,
+ ""copyBehavior"": ""PreserveHierarchy""
+ },
+ ""formatSettings"": {
+ ""type"": ""DelimitedTextWriteSettings"",
+ ""quoteAllText"": true,
+ ""fileExtension"": "".csv""
+ }
+ }
+ },
+ ""inputs"": [
+ {
+ ""referenceName"": ""exampleDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ],
+ ""outputs"": [
+ {
+ ""referenceName"": ""exampleDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ],
+ ""name"": ""ExampleCopyActivity""
+ }
+ ]
+ }
+}";
}
}