diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
index f0bdfe3267bd..dfa465d8007b 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs
@@ -37,11 +37,20 @@ public AvroWriteSettings()
/// which is required in AVRO spec.</param>
/// <param name="recordNamespace">Record namespace in the write
/// result.</param>
- public AvroWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string recordName = default(string), string recordNamespace = default(string))
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public AvroWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string recordName = default(string), string recordNamespace = default(string), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
: base(additionalProperties)
{
RecordName = recordName;
RecordNamespace = recordNamespace;
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
CustomInit();
}
@@ -63,5 +72,22 @@ public AvroWriteSettings()
[JsonProperty(PropertyName = "recordNamespace")]
public string RecordNamespace { get; set; }
+ /// <summary>
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ /// </summary>
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ /// <summary>
+ /// Gets or sets specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt;
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs
new file mode 100644
index 000000000000..88ce47ca4f48
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs
@@ -0,0 +1,97 @@
+// <auto-generated>
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+// </auto-generated>
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Azure Databricks Delta Lake dataset.
+ /// </summary>
+ [Rest.Serialization.JsonTransformation]
+ public partial class AzureDatabricksDeltaLakeDataset : Dataset
+ {
+ /// <summary>
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset
+ /// class.
+ /// </summary>
+ public AzureDatabricksDeltaLakeDataset()
+ {
+ LinkedServiceName = new LinkedServiceReference();
+ CustomInit();
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset
+ /// class.
+ /// </summary>
+ /// <param name="linkedServiceName">Linked service reference.</param>
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="description">Dataset description.</param>
+ /// <param name="structure">Columns that define the structure of the
+ /// dataset. Type: array (or Expression with resultType array),
+ /// itemType: DatasetDataElement.</param>
+ /// <param name="schema">Columns that define the physical type schema
+ /// of the dataset. Type: array (or Expression with resultType array),
+ /// itemType: DatasetSchemaDataElement.</param>
+ /// <param name="parameters">Parameters for dataset.</param>
+ /// <param name="annotations">List of tags that can be used for
+ /// describing the Dataset.</param>
+ /// <param name="folder">The folder that this Dataset is in. If not
+ /// specified, Dataset will appear at the root level.</param>
+ /// <param name="table">The name of delta table. Type: string (or
+ /// Expression with resultType string).</param>
+ /// <param name="database">The database name of delta table. Type:
+ /// string (or Expression with resultType string).</param>
+ public AzureDatabricksDeltaLakeDataset(LinkedServiceReference linkedServiceName, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), object structure = default(object), object schema = default(object), IDictionary<string, ParameterSpecification> parameters = default(IDictionary<string, ParameterSpecification>), IList<object> annotations = default(IList<object>), DatasetFolder folder = default(DatasetFolder), object table = default(object), object database = default(object))
+ : base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder)
+ {
+ Table = table;
+ Database = database;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ /// <summary>
+ /// Gets or sets the name of delta table. Type: string (or Expression
+ /// with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "typeProperties.table")]
+ public object Table { get; set; }
+
+ /// <summary>
+ /// Gets or sets the database name of delta table. Type: string (or
+ /// Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "typeProperties.database")]
+ public object Database { get; set; }
+
+ /// <summary>
+ /// Validate the object.
+ /// </summary>
+ /// <exception cref="ValidationException">
+ /// Thrown if validation fails
+ /// </exception>
+ public override void Validate()
+ {
+ base.Validate();
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs
new file mode 100644
index 000000000000..2a95efec6934
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Azure Databricks Delta Lake export command settings.
+ /// </summary>
+ public partial class AzureDatabricksDeltaLakeExportCommand : ExportSettings
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeExportCommand class.
+ ///
+ public AzureDatabricksDeltaLakeExportCommand()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeExportCommand class.
+ ///
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="dateFormat">Specify the date format for the csv in
+ /// Azure Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).</param>
+ /// <param name="timestampFormat">Specify the timestamp format for the
+ /// csv in Azure Databricks Delta Lake Copy. Type: string (or
+ /// Expression with resultType string).</param>
+ public AzureDatabricksDeltaLakeExportCommand(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object dateFormat = default(object), object timestampFormat = default(object))
+ : base(additionalProperties)
+ {
+ DateFormat = dateFormat;
+ TimestampFormat = timestampFormat;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets specify the date format for the csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "dateFormat")]
+ public object DateFormat { get; set; }
+
+ ///
+ /// Gets or sets specify the timestamp format for the csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "timestampFormat")]
+ public object TimestampFormat { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs
new file mode 100644
index 000000000000..79e29fe8232d
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Azure Databricks Delta Lake import command settings.
+ /// </summary>
+ public partial class AzureDatabricksDeltaLakeImportCommand : ImportSettings
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeImportCommand class.
+ ///
+ public AzureDatabricksDeltaLakeImportCommand()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeImportCommand class.
+ ///
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="dateFormat">Specify the date format for csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).</param>
+ /// <param name="timestampFormat">Specify the timestamp format for csv
+ /// in Azure Databricks Delta Lake Copy. Type: string (or Expression
+ /// with resultType string).</param>
+ public AzureDatabricksDeltaLakeImportCommand(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object dateFormat = default(object), object timestampFormat = default(object))
+ : base(additionalProperties)
+ {
+ DateFormat = dateFormat;
+ TimestampFormat = timestampFormat;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets specify the date format for csv in Azure Databricks
+ /// Delta Lake Copy. Type: string (or Expression with resultType
+ /// string).
+ ///
+ [JsonProperty(PropertyName = "dateFormat")]
+ public object DateFormat { get; set; }
+
+ ///
+ /// Gets or sets specify the timestamp format for csv in Azure
+ /// Databricks Delta Lake Copy. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "timestampFormat")]
+ public object TimestampFormat { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs
new file mode 100644
index 000000000000..ec88ac66a5ac
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs
@@ -0,0 +1,123 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Microsoft.Rest;
+ using Microsoft.Rest.Serialization;
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Azure Databricks Delta Lake linked service.
+ /// </summary>
+ [Newtonsoft.Json.JsonObject("AzureDatabricksDeltaLake")]
+ [Rest.Serialization.JsonTransformation]
+ public partial class AzureDatabricksDeltaLakeLinkedService : LinkedService
+ {
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeLinkedService class.
+ ///
+ public AzureDatabricksDeltaLakeLinkedService()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the
+ /// AzureDatabricksDeltaLakeLinkedService class.
+ ///
+ /// <param name="domain">&lt;REGION&gt;.azuredatabricks.net, domain
+ /// name of your Databricks deployment. Type: string (or Expression
+ /// with resultType string).</param>
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="connectVia">The integration runtime reference.</param>
+ /// <param name="description">Linked service description.</param>
+ /// <param name="parameters">Parameters for linked service.</param>
+ /// <param name="annotations">List of tags that can be used for
+ /// describing the linked service.</param>
+ /// <param name="accessToken">Access token for databricks REST API.
+ /// Refer to
+ /// https://docs.azuredatabricks.net/api/latest/authentication.html.
+ /// Type: string, SecureString or AzureKeyVaultSecretReference.</param>
+ /// <param name="clusterId">The id of an existing interactive cluster
+ /// that will be used for all runs of this job. Type: string (or
+ /// Expression with resultType string).</param>
+ /// <param name="encryptedCredential">The encrypted credential used for
+ /// authentication. Credentials are encrypted using the integration
+ /// runtime credential manager. Type: string (or Expression with
+ /// resultType string).</param>
+ public AzureDatabricksDeltaLakeLinkedService(object domain, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary<string, ParameterSpecification> parameters = default(IDictionary<string, ParameterSpecification>), IList<object> annotations = default(IList<object>), SecretBase accessToken = default(SecretBase), object clusterId = default(object), object encryptedCredential = default(object))
+ : base(additionalProperties, connectVia, description, parameters, annotations)
+ {
+ Domain = domain;
+ AccessToken = accessToken;
+ ClusterId = clusterId;
+ EncryptedCredential = encryptedCredential;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ /// <summary>
+ /// Gets or sets &lt;REGION&gt;.azuredatabricks.net, domain
+ /// name of your Databricks deployment. Type: string (or Expression
+ /// with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "typeProperties.domain")]
+ public object Domain { get; set; }
+
+ ///
+ /// Gets or sets access token for databricks REST API. Refer to
+ /// https://docs.azuredatabricks.net/api/latest/authentication.html.
+ /// Type: string, SecureString or AzureKeyVaultSecretReference.
+ ///
+ [JsonProperty(PropertyName = "typeProperties.accessToken")]
+ public SecretBase AccessToken { get; set; }
+
+ ///
+ /// Gets or sets the id of an existing interactive cluster that will be
+ /// used for all runs of this job. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.clusterId")]
+ public object ClusterId { get; set; }
+
+ ///
+ /// Gets or sets the encrypted credential used for authentication.
+ /// Credentials are encrypted using the integration runtime credential
+ /// manager. Type: string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "typeProperties.encryptedCredential")]
+ public object EncryptedCredential { get; set; }
+
+ /// <summary>
+ /// Validate the object.
+ /// </summary>
+ /// <exception cref="ValidationException">
+ /// Thrown if validation fails
+ /// </exception>
+ public override void Validate()
+ {
+ base.Validate();
+ if (Domain == null)
+ {
+ throw new ValidationException(ValidationRules.CannotBeNull, "Domain");
+ }
+ }
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs
new file mode 100644
index 000000000000..7df2d3a1dd14
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs
@@ -0,0 +1,82 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// A copy activity Azure Databricks Delta Lake sink.
+ /// </summary>
+ public partial class AzureDatabricksDeltaLakeSink : CopySink
+ {
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSink
+ /// class.
+ ///
+ public AzureDatabricksDeltaLakeSink()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSink
+ /// class.
+ ///
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="writeBatchSize">Write batch size. Type: integer (or
+ /// Expression with resultType integer), minimum: 0.</param>
+ /// <param name="writeBatchTimeout">Write batch timeout. Type: string
+ /// (or Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
+ /// <param name="sinkRetryCount">Sink retry count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="sinkRetryWait">Sink retry wait. Type: string (or
+ /// Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
+ /// <param name="maxConcurrentConnections">The maximum concurrent
+ /// connection count for the sink data store. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="preCopyScript">SQL pre-copy script. Type: string (or
+ /// Expression with resultType string).</param>
+ /// <param name="importSettings">Azure Databricks Delta Lake import
+ /// settings.</param>
+ public AzureDatabricksDeltaLakeSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object preCopyScript = default(object), AzureDatabricksDeltaLakeImportCommand importSettings = default(AzureDatabricksDeltaLakeImportCommand))
+ : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
+ {
+ PreCopyScript = preCopyScript;
+ ImportSettings = importSettings;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets SQL pre-copy script. Type: string (or Expression with
+ /// resultType string).
+ ///
+ [JsonProperty(PropertyName = "preCopyScript")]
+ public object PreCopyScript { get; set; }
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake import settings.
+ ///
+ [JsonProperty(PropertyName = "importSettings")]
+ public AzureDatabricksDeltaLakeImportCommand ImportSettings { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs
new file mode 100644
index 000000000000..55f52dbb477d
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs
@@ -0,0 +1,77 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// A copy activity Azure Databricks Delta Lake source.
+ /// </summary>
+ public partial class AzureDatabricksDeltaLakeSource : CopySource
+ {
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSource
+ /// class.
+ ///
+ public AzureDatabricksDeltaLakeSource()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the AzureDatabricksDeltaLakeSource
+ /// class.
+ ///
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="sourceRetryCount">Source retry count. Type: integer
+ /// (or Expression with resultType integer).</param>
+ /// <param name="sourceRetryWait">Source retry wait. Type: string (or
+ /// Expression with resultType string), pattern:
+ /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
+ /// <param name="maxConcurrentConnections">The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="query">Azure Databricks Delta Lake Sql query. Type:
+ /// string (or Expression with resultType string).</param>
+ /// <param name="exportSettings">Azure Databricks Delta Lake export
+ /// settings.</param>
+ public AzureDatabricksDeltaLakeSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object), AzureDatabricksDeltaLakeExportCommand exportSettings = default(AzureDatabricksDeltaLakeExportCommand))
+ : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections)
+ {
+ Query = query;
+ ExportSettings = exportSettings;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake Sql query. Type: string
+ /// (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "query")]
+ public object Query { get; set; }
+
+ ///
+ /// Gets or sets azure Databricks Delta Lake export settings.
+ ///
+ [JsonProperty(PropertyName = "exportSettings")]
+ public AzureDatabricksDeltaLakeExportCommand ExportSettings { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
index a54f9c2e64bb..31e6a7d966ef 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs
@@ -39,11 +39,20 @@ public DelimitedTextWriteSettings()
/// <param name="quoteAllText">Indicates whether string values should
/// always be enclosed with quotes. Type: boolean (or Expression with
/// resultType boolean).</param>
- public DelimitedTextWriteSettings(object fileExtension, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object quoteAllText = default(object))
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public DelimitedTextWriteSettings(object fileExtension, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object quoteAllText = default(object), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
: base(additionalProperties)
{
QuoteAllText = quoteAllText;
FileExtension = fileExtension;
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
CustomInit();
}
@@ -67,6 +76,23 @@ public DelimitedTextWriteSettings()
[JsonProperty(PropertyName = "fileExtension")]
public object FileExtension { get; set; }
+ ///
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ ///
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ ///
+ /// Gets or sets specifies the file name pattern
+ /// <fileNamePrefix>_<fileIndex>.<fileExtension>
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
///
/// Validate the object.
///
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
index 0eb07d896d9e..d90366477c64 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs
@@ -47,10 +47,12 @@ public OrcSink()
/// connection count for the sink data store. Type: integer (or
/// Expression with resultType integer).</param>
/// <param name="storeSettings">ORC store settings.</param>
- public OrcSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings))
+ /// <param name="formatSettings">ORC format settings.</param>
+ public OrcSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), OrcWriteSettings formatSettings = default(OrcWriteSettings))
: base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
{
StoreSettings = storeSettings;
+ FormatSettings = formatSettings;
CustomInit();
}
@@ -65,5 +67,11 @@ public OrcSink()
[JsonProperty(PropertyName = "storeSettings")]
public StoreWriteSettings StoreSettings { get; set; }
+ ///
+ /// Gets or sets ORC format settings.
+ ///
+ [JsonProperty(PropertyName = "formatSettings")]
+ public OrcWriteSettings FormatSettings { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs
new file mode 100644
index 000000000000..3dd01eab7727
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Orc write settings.
+ /// </summary>
+ public partial class OrcWriteSettings : FormatWriteSettings
+ {
+ ///
+ /// Initializes a new instance of the OrcWriteSettings class.
+ ///
+ public OrcWriteSettings()
+ {
+ CustomInit();
+ }
+
+ ///
+ /// Initializes a new instance of the OrcWriteSettings class.
+ ///
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public OrcWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
+ : base(additionalProperties)
+ {
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
+ CustomInit();
+ }
+
+ ///
+ /// An initialization method that performs custom operations like setting defaults
+ ///
+ partial void CustomInit();
+
+ ///
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ ///
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ ///
+ /// Gets or sets specifies the file name pattern
+ /// <fileNamePrefix>_<fileIndex>.<fileExtension>
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ ///
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
index 67b95379fe09..5873985de449 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs
@@ -47,10 +47,12 @@ public ParquetSink()
/// connection count for the sink data store. Type: integer (or
/// Expression with resultType integer).</param>
/// <param name="storeSettings">Parquet store settings.</param>
- public ParquetSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings))
+ /// <param name="formatSettings">Parquet format settings.</param>
+ public ParquetSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), ParquetWriteSettings formatSettings = default(ParquetWriteSettings))
: base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
{
StoreSettings = storeSettings;
+ FormatSettings = formatSettings;
CustomInit();
}
@@ -65,5 +67,11 @@ public ParquetSink()
[JsonProperty(PropertyName = "storeSettings")]
public StoreWriteSettings StoreSettings { get; set; }
+ ///
+ /// Gets or sets parquet format settings.
+ ///
+ [JsonProperty(PropertyName = "formatSettings")]
+ public ParquetWriteSettings FormatSettings { get; set; }
+
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs
new file mode 100644
index 000000000000..5204159ed3aa
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs
@@ -0,0 +1,74 @@
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+//
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+ using Newtonsoft.Json;
+ using System.Collections;
+ using System.Collections.Generic;
+ using System.Linq;
+
+ /// <summary>
+ /// Parquet write settings.
+ /// </summary>
+ public partial class ParquetWriteSettings : FormatWriteSettings
+ {
+ /// <summary>
+ /// Initializes a new instance of the ParquetWriteSettings class.
+ /// </summary>
+ public ParquetWriteSettings()
+ {
+ CustomInit();
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the ParquetWriteSettings class.
+ /// </summary>
+ /// <param name="additionalProperties">Unmatched properties from the
+ /// message are deserialized this collection</param>
+ /// <param name="maxRowsPerFile">Limit the written file's row count to
+ /// be smaller than or equal to the specified count. Type: integer (or
+ /// Expression with resultType integer).</param>
+ /// <param name="fileNamePrefix">Specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when
+ /// copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).</param>
+ public ParquetWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxRowsPerFile = default(object), object fileNamePrefix = default(object))
+ : base(additionalProperties)
+ {
+ MaxRowsPerFile = maxRowsPerFile;
+ FileNamePrefix = fileNamePrefix;
+ CustomInit();
+ }
+
+ /// <summary>
+ /// An initialization method that performs custom operations like setting defaults
+ /// </summary>
+ partial void CustomInit();
+
+ /// <summary>
+ /// Gets or sets limit the written file's row count to be smaller than
+ /// or equal to the specified count. Type: integer (or Expression with
+ /// resultType integer).
+ /// </summary>
+ [JsonProperty(PropertyName = "maxRowsPerFile")]
+ public object MaxRowsPerFile { get; set; }
+
+ /// <summary>
+ /// Gets or sets specifies the file name pattern
+ /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt;
+ /// when copy from non-file based store without partitionOptions. Type:
+ /// string (or Expression with resultType string).
+ /// </summary>
+ [JsonProperty(PropertyName = "fileNamePrefix")]
+ public object FileNamePrefix { get; set; }
+
+ }
+}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SsisPackageLocation.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SSISPackageLocation.cs
similarity index 100%
rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SsisPackageLocation.cs
rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SSISPackageLocation.cs
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
index c15c9ea7d5b4..75005e8302fd 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs
@@ -29,24 +29,16 @@ public static IEnumerable<Tuple<string, string, string>> ApiInfo_DataFactoryMana
new Tuple<string, string, string>("DataFactory", "IntegrationRuntimeObjectMetadata", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "IntegrationRuntimes", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "LinkedServices", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "ManagedPrivateEndpoints", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "ManagedVirtualNetworks", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "Operations", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "PipelineRuns", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "Pipelines", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "TriggerRuns", "2018-06-01"),
new Tuple<string, string, string>("DataFactory", "Triggers", "2018-06-01"),
+ new Tuple<string, string, string>("DataFactory", "managedPrivateEndpoints", "2018-06-01"),
}.AsEnumerable();
}
}
- // BEGIN: Code Generation Metadata Section
- public static readonly String AutoRestVersion = "v2";
- public static readonly String AutoRestBootStrapperVersion = "autorest@2.0.4413";
- public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\\Projects\\azure-sdk-for-net\\sdk";
- public static readonly String GithubForkName = "Azure";
- public static readonly String GithubBranchName = "master";
- public static readonly String GithubCommidId = "b8630cc7b5869fbb764eeca3a618b23141e612db";
- public static readonly String CodeGenerationErrors = "";
- public static readonly String GithubRepoName = "azure-rest-api-specs";
- // END: Code Generation Metadata Section
}
}
-