diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
index 851c0094f1a1..88213a692e47 100644
--- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
+++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
@@ -4,11 +4,11 @@ Commencing code generation
Generating CSharp code
Executing AutoRest command
cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk
-2020-09-01 05:25:03 UTC
+2020-09-21 07:26:19 UTC
Azure-rest-api-specs repository information
GitHub fork: Azure
Branch: master
-Commit: aa96f138f37c06bdbf3458a4fa327f08a593594c
+Commit: fce3400431eff281bddd04bed9727e63765b8da0
AutoRest information
Requested version: v2
Bootstrapper version: autorest@2.0.4413
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
index 8c741351ef74..bc38aa927cf9 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
@@ -4,6 +4,8 @@
### Feature Additions
- Added logLevel/enableReliableLogging to LogStorageSettings
- Support Tar GZip compression type in Data Factory
+- Added maxRowsPerFile/fileNamePrefix to tabular format settings
+- Added support for Azure Databricks Delta Lake
## Version 4.11.0
### Feature Additions
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs
deleted file mode 100644
index 9730699a3c58..000000000000
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License. See License.txt in the project root for
-// license information.
-//
-// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
-// Changes may cause incorrect behavior and will be lost if the code is
-// regenerated.
-
-namespace Microsoft.Azure.Management.DataFactory.Models
-{
-
- ///
- /// Defines values for TabularPartitionOption.
- ///
- public static class TabularPartitionOption
- {
- public const string None = "None";
- public const string PartitionOnInt = "PartitionOnInt";
- public const string PartitionOnCalendarYear = "PartitionOnCalendarYear";
- public const string PartitionOnCalendarMonth = "PartitionOnCalendarMonth";
- public const string PartitionOnCalendarDate = "PartitionOnCalendarDate";
- }
-}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs
index 028d20fc7a8a..46f1baaa95ab 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs
@@ -501,6 +501,8 @@ private void Initialize()
DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type"));
SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter("type"));
DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type"));
+ SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter("type"));
+ DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type"));
CustomInitialize();
DeserializationSettings.Converters.Add(new TransformationJsonConverter());
DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
index f0bdfe3267bd..dfa465d8007b 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
@@ -37,11 +37,20 @@ public AvroWriteSettings()
/// which is required in AVRO spec.
/// Record namespace in the write
/// result.
- public AvroWriteSettings(IDictionary additionalProperties = default(IDictionary), string recordName = default(string), string recordNamespace = default(string))
+ /// Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).
+ /// Specifies the file name pattern
+ /// <fileNamePrefix>_<fileIndex>.<fileExtension> when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ public AvroWriteSettings(IDictionary additionalProperties = default(IDictionary), string recordName = default(string), string recordNamespace = default(string), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
: base(additionalProperties)
{
RecordName = recordName;
RecordNamespace = recordNamespace;
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
CustomInit();
}
@@ -63,5 +72,22 @@ public AvroWriteSettings()
[JsonProperty(PropertyName = "recordNamespace")]
public string RecordNamespace { get; set; }
+ ///
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ ///
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ ///
+ /// Gets or sets specifies the file name pattern
+ /// <fileNamePrefix>_<fileIndex>.<fileExtension>
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs
new file mode 100644
index 000000000000..88ce47ca4f48
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs
@@ -0,0 +1,97 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure Databricks Delta Lake dataset.
+ ///
+ [Rest.Serialization.JsonTransformation]
+ public partial class AzureDatabricksDeltaLakeDataset : Dataset
+ {
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset
+ /// class.
+ ///
+ public AzureDatabricksDeltaLakeDataset()
+ {
+ LinkedServiceName = new LinkedServiceReference();
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset
+ /// class.
+ ///
+ /// Linked service reference.
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Dataset description.
+ /// Columns that define the structure of the
+ /// dataset. Type: array (or Expression with resultType array),
+ /// itemType: DatasetDataElement.
+ /// Columns that define the physical type schema
+ /// of the dataset. Type: array (or Expression with resultType array),
+ /// itemType: DatasetSchemaDataElement.
+ /// Parameters for dataset.
+ /// List of tags that can be used for
+ /// describing the Dataset.
+ /// The folder that this Dataset is in. If not
+ /// specified, Dataset will appear at the root level.
+ /// The name of delta table. Type: string (or
+ /// Expression with resultType string).
+ /// The database name of delta table. Type:
+ /// string (or Expression with resultType string).
+ public AzureDatabricksDeltaLakeDataset(LinkedServiceReference linkedServiceName, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder), object table = default(object), object database = default(object))
+ : base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder)
+ {
+ Table = table;
+ Database = database;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets the name of delta table. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.table")]
+ public object Table { get; set; }
+
+ ///
+ /// Gets or sets the database name of delta table. Type: string (or
+ /// Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.database")]
+ public object Database { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs
new file mode 100644
index 000000000000..2a95efec6934
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure Databricks Delta Lake export command settings.
+ ///
+ public partial class AzureDatabricksDeltaLakeExportCommand : ExportSettings
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeExportCommand class.
+ ///
+ public AzureDatabricksDeltaLakeExportCommand()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeExportCommand class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Specify the date format for the csv in
+ /// Azure Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ /// Specify the timestamp format for the
+ /// csv in Azure Databricks Delta Lake Copy. Type: string (or
+ /// Expression with resultType string).
+ public AzureDatabricksDeltaLakeExportCommand(IDictionary additionalProperties = default(IDictionary), object dateFormat = default(object), object timestampFormat = default(object))
+ : base(additionalProperties)
+ {
+ DateFormat = dateFormat;
+ TimestampFormat = timestampFormat;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets specify the date format for the csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "dateFormat")]
+ public object DateFormat { get; set; }
+
+ ///
+ /// Gets or sets specify the timestamp format for the csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "timestampFormat")]
+ public object TimestampFormat { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs
new file mode 100644
index 000000000000..79e29fe8232d
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure Databricks Delta Lake import command settings.
+ ///
+ public partial class AzureDatabricksDeltaLakeImportCommand : ImportSettings
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeImportCommand class.
+ ///
+ public AzureDatabricksDeltaLakeImportCommand()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeImportCommand class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Specify the date format for csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ /// Specify the timestamp format for csv
+ /// in Azure Databricks Delta Lake Copy. Type: string (or Expression
+ /// with resultType string).
+ public AzureDatabricksDeltaLakeImportCommand(IDictionary additionalProperties = default(IDictionary), object dateFormat = default(object), object timestampFormat = default(object))
+ : base(additionalProperties)
+ {
+ DateFormat = dateFormat;
+ TimestampFormat = timestampFormat;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets specify the date format for csv in Azure Databricks
+ /// Delta Lake Copy. Type: string (or Expression with resultType
+ /// string).
+ ///
+ [JsonProperty(PropertyName = "dateFormat")]
+ public object DateFormat { get; set; }
+
+ ///
+ /// Gets or sets specify the timestamp format for csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "timestampFormat")]
+ public object TimestampFormat { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs
new file mode 100644
index 000000000000..ec88ac66a5ac
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs
@@ -0,0 +1,123 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Azure Databricks Delta Lake linked service.
+ ///
+ [Newtonsoft.Json.JsonObject("AzureDatabricksDeltaLake")]
+ [Rest.Serialization.JsonTransformation]
+ public partial class AzureDatabricksDeltaLakeLinkedService : LinkedService
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeLinkedService class.
+ ///
+ public AzureDatabricksDeltaLakeLinkedService()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeLinkedService class.
+ ///
+ /// <REGION>.azuredatabricks.net, domain
+ /// name of your Databricks deployment. Type: string (or Expression
+ /// with resultType string).
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// The integration runtime reference.
+ /// Linked service description.
+ /// Parameters for linked service.
+ /// List of tags that can be used for
+ /// describing the linked service.
+ /// Access token for databricks REST API.
+ /// Refer to
+ /// https://docs.azuredatabricks.net/api/latest/authentication.html.
+ /// Type: string, SecureString or AzureKeyVaultSecretReference.
+ /// The id of an existing interactive cluster
+ /// that will be used for all runs of this job. Type: string (or
+ /// Expression with resultType string).
+ /// The encrypted credential used for
+ /// authentication. Credentials are encrypted using the integration
+ /// runtime credential manager. Type: string (or Expression with
+ /// resultType string).
+ public AzureDatabricksDeltaLakeLinkedService(object domain, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), SecretBase accessToken = default(SecretBase), object clusterId = default(object), object encryptedCredential = default(object))
+ : base(additionalProperties, connectVia, description, parameters, annotations)
+ {
+ Domain = domain;
+ AccessToken = accessToken;
+ ClusterId = clusterId;
+ EncryptedCredential = encryptedCredential;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets <REGION>.azuredatabricks.net, domain
+ /// name of your Databricks deployment. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.domain")]
+ public object Domain { get; set; }
+
+ ///
+ /// Gets or sets access token for databricks REST API. Refer to
+ /// https://docs.azuredatabricks.net/api/latest/authentication.html.
+ /// Type: string, SecureString or AzureKeyVaultSecretReference.
+ ///
+ [JsonProperty(PropertyName = "typeProperties.accessToken")]
+ public SecretBase AccessToken { get; set; }
+
+ ///
+ /// Gets or sets the id of an existing interactive cluster that will be
+ /// used for all runs of this job. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.clusterId")]
+ public object ClusterId { get; set; }
+
+ ///
+ /// Gets or sets the encrypted credential used for authentication.
+ /// Credentials are encrypted using the integration runtime credential
+ /// manager. Type: string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.encryptedCredential")]
+ public object EncryptedCredential { get; set; }
+
+ ///
+ /// Validate the object.
+ ///
+ ///
+ /// Thrown if validation fails
+ ///
+ public override void Validate()
+ {
+ base.Validate();
+ if (Domain == null)
+ {
+ throw new ValidationException(ValidationRules.CannotBeNull, "Domain");
+ }
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs
new file mode 100644
index 000000000000..7df2d3a1dd14
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs
@@ -0,0 +1,82 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// A copy activity Azure Databricks Delta Lake sink.
+ ///
+ public partial class AzureDatabricksDeltaLakeSink : CopySink
+ {
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSink
+ /// class.
+ ///
+ public AzureDatabricksDeltaLakeSink()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSink
+ /// class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Write batch size. Type: integer (or
+ /// Expression with resultType integer), minimum: 0.
+ /// Write batch timeout. Type: string
+ /// (or Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ /// Sink retry count. Type: integer (or
+ /// Expression with resultType integer).
+ /// Sink retry wait. Type: string (or
+ /// Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ /// The maximum concurrent
+ /// connection count for the sink data store. Type: integer (or
+ /// Expression with resultType integer).
+ /// SQL pre-copy script. Type: string (or
+ /// Expression with resultType string).
+ /// Azure Databricks Delta Lake import
+ /// settings.
+ public AzureDatabricksDeltaLakeSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object preCopyScript = default(object), AzureDatabricksDeltaLakeImportCommand importSettings = default(AzureDatabricksDeltaLakeImportCommand))
+ : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
+ {
+ PreCopyScript = preCopyScript;
+ ImportSettings = importSettings;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets SQL pre-copy script. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "preCopyScript")]
+ public object PreCopyScript { get; set; }
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake import settings.
+ ///
+ [JsonProperty(PropertyName = "importSettings")]
+ public AzureDatabricksDeltaLakeImportCommand ImportSettings { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs
new file mode 100644
index 000000000000..55f52dbb477d
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs
@@ -0,0 +1,77 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// A copy activity Azure Databricks Delta Lake source.
+ ///
+ public partial class AzureDatabricksDeltaLakeSource : CopySource
+ {
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSource
+ /// class.
+ ///
+ public AzureDatabricksDeltaLakeSource()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSource
+ /// class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Source retry count. Type: integer
+ /// (or Expression with resultType integer).
+ /// Source retry wait. Type: string (or
+ /// Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ /// The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).
+ /// Azure Databricks Delta Lake Sql query. Type:
+ /// string (or Expression with resultType string).
+ /// Azure Databricks Delta Lake export
+ /// settings.
+ public AzureDatabricksDeltaLakeSource(IDictionary additionalProperties = default(IDictionary), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object), AzureDatabricksDeltaLakeExportCommand exportSettings = default(AzureDatabricksDeltaLakeExportCommand))
+ : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections)
+ {
+ Query = query;
+ ExportSettings = exportSettings;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake Sql query. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "query")]
+ public object Query { get; set; }
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake export settings.
+ ///
+ [JsonProperty(PropertyName = "exportSettings")]
+ public AzureDatabricksDeltaLakeExportCommand ExportSettings { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs
similarity index 86%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs
index 42805261ca0e..01cc5d50b1bb 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs
@@ -1,10 +1,12 @@
+//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
-// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
+// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
+//
namespace Microsoft.Azure.Management.DataFactory.Models
{
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyTranslator.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyTranslator.cs
similarity index 100%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyTranslator.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyTranslator.cs
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs
similarity index 90%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs
index 3a0df0cf6014..d6871725ed8b 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs
@@ -1,16 +1,15 @@
+//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
-// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
+// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
+//
namespace Microsoft.Azure.Management.DataFactory.Models
{
- using Microsoft.Azure;
- using Microsoft.Azure.Management;
- using Microsoft.Azure.Management.DataFactory;
using Newtonsoft.Json;
using System.Linq;
@@ -24,7 +23,7 @@ public partial class DatasetDataElement
///
public DatasetDataElement()
{
- CustomInit();
+ CustomInit();
}
///
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs
new file mode 100644
index 000000000000..b79a156e2b66
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs
@@ -0,0 +1,75 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ ///
+ /// Columns that define the physical type schema of the dataset.
+ ///
+ public partial class DatasetSchemaDataElement
+ {
+ ///
+ /// Initializes a new instance of the DatasetSchemaDataElement class.
+ ///
+ public DatasetSchemaDataElement()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the DatasetSchemaDataElement class.
+ ///
+ /// Unmatched properties from the
+ /// message are deserialized this collection
+ /// Name of the schema column. Type: string (or
+ /// Expression with resultType string).
+ /// Type of the schema column. Type: string (or
+ /// Expression with resultType string).
+ public DatasetSchemaDataElement(IDictionary additionalProperties = default(IDictionary), object name = default(object), object type = default(object))
+ {
+ AdditionalProperties = additionalProperties;
+ Name = name;
+ Type = type;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets unmatched properties from the message are deserialized
+ /// this collection
+ ///
+ [JsonExtensionData]
+ public IDictionary AdditionalProperties { get; set; }
+
+ ///
+ /// Gets or sets name of the schema column. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "name")]
+ public object Name { get; set; }
+
+ ///
+ /// Gets or sets type of the schema column. Type: string (or Expression
+ /// with resultType string).
+ ///
+ [JsonProperty(PropertyName = "type")]
+ public object Type { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
index a54f9c2e64bb..31e6a7d966ef 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
@@ -39,11 +39,20 @@ public DelimitedTextWriteSettings()
/// Indicates whether string values should
/// always be enclosed with quotes. Type: boolean (or Expression with
/// resultType boolean).
- public DelimitedTextWriteSettings(object fileExtension, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object quoteAllText = default(object))
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public DelimitedTextWriteSettings(object fileExtension, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object quoteAllText = default(object), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
: base(additionalProperties)
{
QuoteAllText = quoteAllText;
FileExtension = fileExtension;
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
CustomInit();
}
@@ -67,6 +76,23 @@ public DelimitedTextWriteSettings()
[JsonProperty(PropertyName = "fileExtension")]
public object FileExtension { get; set; }
+ /// <summary>
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ /// </summary>
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ /// <summary>
+ /// Gets or sets specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt;
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
///
/// Validate the object.
///
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs
similarity index 83%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs
index ab51de7a5277..6dda872f1716 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs
@@ -1,10 +1,12 @@
+//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
-// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
+// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
+//
namespace Microsoft.Azure.Management.DataFactory.Models
{
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
index 0eb07d896d9e..d90366477c64 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
@@ -47,10 +47,12 @@ public OrcSink()
/// connection count for the sink data store. Type: integer (or
/// Expression with resultType integer).
/// <param name="storeSettings">ORC store settings.</param>
- public OrcSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings))
+ /// <param name="formatSettings">ORC format settings.</param>
+ public OrcSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), OrcWriteSettings formatSettings = default(OrcWriteSettings))
: base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
{
StoreSettings = storeSettings;
+ FormatSettings = formatSettings;
CustomInit();
}
@@ -65,5 +67,11 @@ public OrcSink()
[JsonProperty(PropertyName = "storeSettings")]
public StoreWriteSettings StoreSettings { get; set; }
+ /// <summary>
+ /// Gets or sets ORC format settings.
+ /// </summary>
+ [JsonProperty(PropertyName = "formatSettings")]
+ public OrcWriteSettings FormatSettings { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs
new file mode 100644
index 000000000000..3dd01eab7727
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Orc write settings.
+ /// </summary>
+ public partial class OrcWriteSettings : FormatWriteSettings
+ {
+ /// <summary>
+ /// Initializes a new instance of the OrcWriteSettings class.
+ /// </summary>
+ public OrcWriteSettings()
+ {
+ CustomInit();
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the OrcWriteSettings class.
+ /// </summary>
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public OrcWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
+ : base(additionalProperties)
+ {
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
+ CustomInit();
+ }
+
+ /// <summary>
+ /// An initialization method that performs custom operations like setting defaults
+ /// </summary>
+ partial void CustomInit();
+
+ /// <summary>
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ /// </summary>
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ /// <summary>
+ /// Gets or sets specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt;
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
index 67b95379fe09..5873985de449 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
@@ -47,10 +47,12 @@ public ParquetSink()
/// connection count for the sink data store. Type: integer (or
/// Expression with resultType integer).
/// <param name="storeSettings">Parquet store settings.</param>
- public ParquetSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings))
+ /// <param name="formatSettings">Parquet format settings.</param>
+ public ParquetSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), ParquetWriteSettings formatSettings = default(ParquetWriteSettings))
: base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
{
StoreSettings = storeSettings;
+ FormatSettings = formatSettings;
CustomInit();
}
@@ -65,5 +67,11 @@ public ParquetSink()
[JsonProperty(PropertyName = "storeSettings")]
public StoreWriteSettings StoreSettings { get; set; }
+ /// <summary>
+ /// Gets or sets parquet format settings.
+ /// </summary>
+ [JsonProperty(PropertyName = "formatSettings")]
+ public ParquetWriteSettings FormatSettings { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs
new file mode 100644
index 000000000000..5204159ed3aa
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Parquet write settings.
+ /// </summary>
+ public partial class ParquetWriteSettings : FormatWriteSettings
+ {
+ /// <summary>
+ /// Initializes a new instance of the ParquetWriteSettings class.
+ /// </summary>
+ public ParquetWriteSettings()
+ {
+ CustomInit();
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the ParquetWriteSettings class.
+ /// </summary>
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public ParquetWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
+ : base(additionalProperties)
+ {
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
+ CustomInit();
+ }
+
+ /// <summary>
+ /// An initialization method that performs custom operations like setting defaults
+ /// </summary>
+ partial void CustomInit();
+
+ /// <summary>
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ /// </summary>
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ /// <summary>
+ /// Gets or sets specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt;
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs
similarity index 56%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs
index c15d50bf35d1..b7f33e452c19 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs
@@ -10,8 +10,10 @@
namespace Microsoft.Azure.Management.DataFactory.Models
{
- using System.Collections.Generic;
using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
///
/// A copy activity tabular translator.
@@ -33,36 +35,41 @@ public TabularTranslator()
/// message are deserialized this collection
/// Column mappings. Example: "UserId:
/// MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression
- /// with resultType string).
+ /// with resultType string). This property will be retired. Please use
+ /// mappings property.
/// The schema mapping to map between
/// tabular data and hierarchical data. Example: {"Column1":
/// "$.Column1", "Column2": "$.Column2.Property1", "Column3":
/// "$.Column2.Property2"}. Type: object (or Expression with resultType
- /// object).
+ /// object). This property will be retired. Please use mappings
+ /// property.
/// The JSON Path of the Nested Array
/// that is going to do cross-apply. Type: object (or Expression with
/// resultType object).
+ /// Whether to map complex
+ /// (array and object) values to simple strings in json format. Type:
+ /// boolean (or Expression with resultType boolean).
/// Column mappings with logical types.
- /// Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\",
- /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"
- /// String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\"
- /// String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}].
- /// Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\",
- /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},
- /// {\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\":
- /// {\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object
- /// (or Expression with resultType object).
- /// Whether to enable the advanced type conversion
- /// feature in the Copy activity. Type: boolean (or Expression with resultType
- /// boolean).
- /// Type conversion settings
- public TabularTranslator(IDictionary additionalProperties = default(IDictionary), object columnMappings = default(object), object schemaMapping = default(object), object collectionReference = default(object), object mappings = default(object), object typeConversion = default(object), TypeConversionSettings typeConversionSettings = default(TypeConversionSettings))
+ /// Tabular->tabular example:
+ /// [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}].
+ /// Hierarchical->tabular example:
+ /// [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}].
+ /// Type: object (or Expression with resultType object).
+ /// Whether to enable the advanced type
+ /// conversion feature in the Copy activity. Type: boolean (or
+ /// Expression with resultType boolean).
+ /// Type conversion
+ /// settings
+ public TabularTranslator(IDictionary additionalProperties = default(IDictionary), object columnMappings = default(object), object schemaMapping = default(object), object collectionReference = default(object), object mapComplexValuesToString = default(object), object mappings = default(object), object typeConversion = default(object), TypeConversionSettings typeConversionSettings = default(TypeConversionSettings))
: base(additionalProperties)
{
ColumnMappings = columnMappings;
SchemaMapping = schemaMapping;
CollectionReference = collectionReference;
+ MapComplexValuesToString = mapComplexValuesToString;
Mappings = mappings;
+ TypeConversion = typeConversion;
+ TypeConversionSettings = typeConversionSettings;
CustomInit();
}
@@ -72,9 +79,10 @@ public TabularTranslator()
partial void CustomInit();
///
- /// Gets or sets column mappings. Example: "UserId: MyUserId, Group:
+ /// Gets or sets column mappings. Example: "UserId: MyUserId, Group:
/// MyGroup, Name: MyName" Type: string (or Expression with resultType
- /// string).
+ /// string). This property will be retired. Please use mappings
+ /// property.
///
[JsonProperty(PropertyName = "columnMappings")]
public object ColumnMappings { get; set; }
@@ -83,7 +91,8 @@ public TabularTranslator()
/// Gets or sets the schema mapping to map between tabular data and
/// hierarchical data. Example: {"Column1": "$.Column1", "Column2":
/// "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type:
- /// object (or Expression with resultType object).
+ /// object (or Expression with resultType object). This property will
+ /// be retired. Please use mappings property.
///
[JsonProperty(PropertyName = "schemaMapping")]
public object SchemaMapping { get; set; }
@@ -96,31 +105,37 @@ public TabularTranslator()
public object CollectionReference { get; set; }
///
- /// Gets or sets the column mappings with logical types.
- /// Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\",
- /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"
- /// String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\"
- /// String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}].
- /// Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\",
- /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},
- /// {\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\":
- /// {\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object
- /// (or Expression with resultType object).
+ /// Gets or sets whether to map complex (array and object) values to
+ /// simple strings in json format. Type: boolean (or Expression with
+ /// resultType boolean).
+ ///
+ [JsonProperty(PropertyName = "mapComplexValuesToString")]
+ public object MapComplexValuesToString { get; set; }
+
+ ///
+ /// Gets or sets column mappings with logical types.
+ /// Tabular->tabular example:
+ /// [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}].
+ /// Hierarchical->tabular example:
+ /// [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}].
+ /// Type: object (or Expression with resultType object).
///
[JsonProperty(PropertyName = "mappings")]
public object Mappings { get; set; }
///
- /// Gets or sets typeConversion indicating whether to enable the advanced type conversion
- /// feature in the Copy activity. Type: boolean (or Expression with resultType boolean).
+ /// Gets or sets whether to enable the advanced type conversion feature
+ /// in the Copy activity. Type: boolean (or Expression with resultType
+ /// boolean).
///
[JsonProperty(PropertyName = "typeConversion")]
public object TypeConversion { get; set; }
///
- /// Gets or sets typeConversionSettings.
+ /// Gets or sets type conversion settings
///
[JsonProperty(PropertyName = "typeConversionSettings")]
public TypeConversionSettings TypeConversionSettings { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs
similarity index 59%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs
index 02c57496faa8..57c7aa987dbe 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs
@@ -1,4 +1,4 @@
-//
+//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
@@ -8,12 +8,14 @@
// regenerated.
//
-
namespace Microsoft.Azure.Management.DataFactory.Models
{
- using System.Collections.Generic;
using Newtonsoft.Json;
+ using System.Linq;
+ /// <summary>
+ /// Type conversion settings
+ /// </summary>
public partial class TypeConversionSettings
{
///
@@ -27,17 +29,25 @@ public TypeConversionSettings()
///
/// Initializes a new instance of the TypeConversionSettings class.
///
- /// Unmatched properties from the message are deserialized this collection
- /// Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean).
- /// Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean).
- /// The format for DateTime values. Type: string (or Expression with resultType string).
- /// The format for DateTimeOffset values. Type: string (or Expression with resultType string).
- /// The format for TimeSpan values. Type: string (or Expression with resultType string).
- /// The culture used to convert data from/to string. Type: string (or Expression with resultType string).
- public TypeConversionSettings(IDictionary additionalProperties = default(IDictionary), object allowDataTruncation = default(object), object treatBooleanAsNumber = default(object), object dateTimeFormat = default(object), object dateTimeOffsetFormat = default(object), object timeSpanFormat = default(object), object culture = default(object))
+ /// <param name="allowDataTruncation">Whether to allow data truncation
+ /// when converting the data. Type: boolean (or Expression with
+ /// resultType boolean).</param>
+ /// <param name="treatBooleanAsNumber">Whether to treat boolean values
+ /// as numbers. Type: boolean (or Expression with resultType
+ /// boolean).</param>
+ /// <param name="dateTimeFormat">The format for DateTime values. Type:
+ /// string (or Expression with resultType string).</param>
+ /// <param name="dateTimeOffsetFormat">The format for DateTimeOffset
+ /// values. Type: string (or Expression with resultType
+ /// string).</param>
+ /// <param name="timeSpanFormat">The format for TimeSpan values. Type:
+ /// string (or Expression with resultType string).</param>
+ /// <param name="culture">The culture used to convert data from/to
+ /// string. Type: string (or Expression with resultType
+ /// string).</param>
+ public TypeConversionSettings(object allowDataTruncation = default(object), object treatBooleanAsNumber = default(object), object dateTimeFormat = default(object), object dateTimeOffsetFormat = default(object), object timeSpanFormat = default(object), object culture = default(object))
{
- AdditionalProperties = additionalProperties;
- AllowDataTruncation = AllowDataTruncation;
+ AllowDataTruncation = allowDataTruncation;
TreatBooleanAsNumber = treatBooleanAsNumber;
DateTimeFormat = dateTimeFormat;
DateTimeOffsetFormat = dateTimeOffsetFormat;
@@ -52,46 +62,46 @@ public TypeConversionSettings()
partial void CustomInit();
///
- /// Gets or sets unmatched properties from the message are deserialized
- /// this collection
- ///
- [JsonExtensionData]
- public IDictionary AdditionalProperties { get; set; }
-
- ///
- /// Gets or sets allowDataTruncation indicating whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean).
+ /// Gets or sets whether to allow data truncation when converting the
+ /// data. Type: boolean (or Expression with resultType boolean).
///
[JsonProperty(PropertyName = "allowDataTruncation")]
public object AllowDataTruncation { get; set; }
///
- /// Gets or sets treatBooleanAsNumber indicating whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean).
+ /// Gets or sets whether to treat boolean values as numbers. Type:
+ /// boolean (or Expression with resultType boolean).
///
[JsonProperty(PropertyName = "treatBooleanAsNumber")]
public object TreatBooleanAsNumber { get; set; }
///
- /// Gets or sets the format for DateTime values. Type: string (or Expression with resultType string).
+ /// Gets or sets the format for DateTime values. Type: string (or
+ /// Expression with resultType string).
///
[JsonProperty(PropertyName = "dateTimeFormat")]
public object DateTimeFormat { get; set; }
///
- /// Gets or sets the format for DateTimeOffset values. Type: string (or Expression with resultType string).
+ /// Gets or sets the format for DateTimeOffset values. Type: string (or
+ /// Expression with resultType string).
///
[JsonProperty(PropertyName = "dateTimeOffsetFormat")]
public object DateTimeOffsetFormat { get; set; }
///
- /// Gets or sets the format for TimeSpan values. Type: string (or Expression with resultType string).
+ /// Gets or sets the format for TimeSpan values. Type: string (or
+ /// Expression with resultType string).
///
[JsonProperty(PropertyName = "timeSpanFormat")]
public object TimeSpanFormat { get; set; }
///
- /// Gets or sets the culture used to convert data from/to string. Type: string (or Expression with resultType string).
+ /// Gets or sets the culture used to convert data from/to string. Type:
+ /// string (or Expression with resultType string).
///
[JsonProperty(PropertyName = "culture")]
public object Culture { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
index c15c9ea7d5b4..2011a9871690 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
@@ -29,11 +29,14 @@ public static IEnumerable> ApiInfo_DataFactoryMana
new Tuple("DataFactory", "IntegrationRuntimeObjectMetadata", "2018-06-01"),
new Tuple("DataFactory", "IntegrationRuntimes", "2018-06-01"),
new Tuple("DataFactory", "LinkedServices", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "ManagedPrivateEndpoints", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "ManagedVirtualNetworks", "2018-06-01"),
new Tuple("DataFactory", "Operations", "2018-06-01"),
new Tuple("DataFactory", "PipelineRuns", "2018-06-01"),
new Tuple("DataFactory", "Pipelines", "2018-06-01"),
new Tuple("DataFactory", "TriggerRuns", "2018-06-01"),
new Tuple("DataFactory", "Triggers", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "managedPrivateEndpoints", "2018-06-01"),
}.AsEnumerable();
}
}
@@ -43,7 +46,7 @@ public static IEnumerable> ApiInfo_DataFactoryMana
public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\\Projects\\azure-sdk-for-net\\sdk";
public static readonly String GithubForkName = "Azure";
public static readonly String GithubBranchName = "master";
- public static readonly String GithubCommidId = "b8630cc7b5869fbb764eeca3a618b23141e612db";
+ public static readonly String GithubCommidId = "fce3400431eff281bddd04bed9727e63765b8da0";
public static readonly String CodeGenerationErrors = "";
public static readonly String GithubRepoName = "azure-rest-api-specs";
// END: Code Generation Metadata Section
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
index fdd9b23a1107..faa5abee941f 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
@@ -12,6 +12,8 @@
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
index d3d6cda306b0..caae96128822 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs
@@ -45,6 +45,27 @@ public class DatasetJsonSamples : JsonSampleCollection
}
}";
+ [JsonSample]
+ public const string AzureDatabricksDeltaLakeTable = @"
+{
+ name: ""AzureDatabricksDeltaLakeDataset"",
+ properties:
+ {
+ type: ""AzureDatabricksDeltaLakeDataset"",
+ linkedServiceName:
+ {
+ referenceName : ""ls"",
+ type : ""LinkedServiceReference""
+ },
+ typeProperties:
+ {
+ ""table"": ""test"",
+ ""database"": ""default""
+ }
+ }
+}
+";
+
[JsonSample]
public const string AzureTable = @"
{
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
index fc970af02d2c..b1ebc369f2c3 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
@@ -32,6 +32,25 @@ public class LinkedServiceJsonSamples : JsonSampleCollection
{
+ [JsonSample]
+ public const string AzureDatabricksDeltaLakeCopyActivity = @"
+{
+ ""name"": ""ExampleCopyActivity"",
+ ""properties"": {
+ ""activities"": [
+ {
+ ""name"": ""MyActivity"",
+ ""type"": ""Copy"",
+ ""typeProperties"": {
+ ""source"": {
+ ""type"": ""AzureDatabricksDeltaLakeSource"",
+ ""query"": ""abc"",
+ ""exportSettings"": {
+ ""type"": ""AzureDatabricksDeltaLakeExportCommand"",
+ ""dateFormat"": ""xxx"",
+ ""timestampFormat"": ""xxx""
+ }
+ },
+ ""sink"": {
+ ""type"": ""AzureDatabricksDeltaLakeSink"",
+ ""preCopyScript"": ""123"",
+ ""importSettings"": {
+ ""type"": ""AzureDatabricksDeltaLakeImportCommand"",
+ ""dateFormat"": ""xxx"",
+ ""timestampFormat"": ""xxx""
+ }
+ }
+ },
+ ""inputs"": [
+ {
+ ""referenceName"": ""exampleSourceDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ],
+ ""outputs"": [
+ {
+ ""referenceName"": ""exampleSinkDataset"",
+ ""type"": ""DatasetReference""
+ }
+ ]
+ }
+ ]
+ }
+}
+";
+
[JsonSample]
public const string CopyActivity = @"
{
@@ -4374,7 +4421,9 @@ public class PipelineJsonSamples : JsonSampleCollection
""formatSettings"": {
""type"": ""DelimitedTextWriteSettings"",
""quoteAllText"": true,
- ""fileExtension"": "".csv""
+ ""fileExtension"": "".csv"",
+ ""maxRowsPerFile"": 10,
+ ""fileNamePrefix"": ""orcSinkFile""
}
},
""validateDataConsistency"": true,