From 19717fcc9deb138822e305b678d02fad658071cf Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Mon, 21 Sep 2020 15:28:09 +0800 Subject: [PATCH 1/9] [DataFactory]SDK change in 09.21 --- .../datafactory_resource-manager.txt | 4 +- .../src/CHANGELOG.md | 2 + .../src/Extensions/TabularPartitionOption.cs | 23 ---- .../Generated/DataFactoryManagementClient.cs | 2 + .../src/Generated/Models/AvroWriteSettings.cs | 28 +++- .../Models/AzureDatabricksDeltaLakeDataset.cs | 97 ++++++++++++++ .../AzureDatabricksDeltaLakeExportCommand.cs | 74 +++++++++++ .../AzureDatabricksDeltaLakeImportCommand.cs | 74 +++++++++++ .../AzureDatabricksDeltaLakeLinkedService.cs | 123 ++++++++++++++++++ .../Models/AzureDatabricksDeltaLakeSink.cs | 82 ++++++++++++ .../Models/AzureDatabricksDeltaLakeSource.cs | 77 +++++++++++ .../Models}/CopyBehaviorType.cs | 4 +- .../Models}/CopyTranslator.cs | 0 .../Models}/DatasetDataElement.cs | 9 +- .../Models/DatasetSchemaDataElement.cs | 75 +++++++++++ .../Models/DelimitedTextWriteSettings.cs | 28 +++- .../Models}/DynamicsSinkWriteBehavior.cs | 4 +- .../src/Generated/Models/OrcSink.cs | 10 +- .../src/Generated/Models/OrcWriteSettings.cs | 74 +++++++++++ .../src/Generated/Models/ParquetSink.cs | 10 +- .../Generated/Models/ParquetWriteSettings.cs | 74 +++++++++++ .../Models}/TabularTranslator.cs | 81 +++++++----- .../Models}/TypeConversionSettings.cs | 62 +++++---- .../SdkInfo_DataFactoryManagementClient.cs | 5 +- ...rosoft.Azure.Management.DataFactory.csproj | 2 + .../tests/JsonSamples/DatasetJsonSamples.cs | 21 +++ .../JsonSamples/LinkedServiceJsonSamples.cs | 19 +++ .../tests/JsonSamples/PipelineJsonSamples.cs | 47 +++++++ 28 files changed, 1015 insertions(+), 96 deletions(-) delete mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs create mode 100644 
sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/CopyBehaviorType.cs (86%) rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/CopyTranslator.cs (100%) rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/DatasetDataElement.cs (90%) create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/DynamicsSinkWriteBehavior.cs (83%) create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/TabularTranslator.cs (56%) rename sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/{Extensions => Generated/Models}/TypeConversionSettings.cs (59%) diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 851c0094f1a1..88213a692e47 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ 
b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-09-01 05:25:03 UTC +2020-09-21 07:26:19 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: aa96f138f37c06bdbf3458a4fa327f08a593594c +Commit: fce3400431eff281bddd04bed9727e63765b8da0 AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index 8c741351ef74..bc38aa927cf9 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -4,6 +4,8 @@ ### Feature Additions - Added logLevel/enableReliableLogging to LogStorageSettings - Support Tar GZip compression type in Data Factory +- Added maxRowPerFile/fileNamePrefix to tabular format settings +- Added support for AzureDatabricks delta lake ## Version 4.11.0 ### Feature Additions diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs deleted file mode 100644 index 9730699a3c58..000000000000 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularPartitionOption.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for -// license information. 
-// -// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0 -// Changes may cause incorrect behavior and will be lost if the code is -// regenerated. - -namespace Microsoft.Azure.Management.DataFactory.Models -{ - - /// - /// Defines values for TabularPartitionOption. - /// - public static class TabularPartitionOption - { - public const string None = "None"; - public const string PartitionOnInt = "PartitionOnInt"; - public const string PartitionOnCalendarYear = "PartitionOnCalendarYear"; - public const string PartitionOnCalendarMonth = "PartitionOnCalendarMonth"; - public const string PartitionOnCalendarDate = "PartitionOnCalendarDate"; - } -} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs index 028d20fc7a8a..46f1baaa95ab 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/DataFactoryManagementClient.cs @@ -501,6 +501,8 @@ private void Initialize() DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type")); SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter("type")); DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type")); + SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter("type")); + DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter("type")); CustomInitialize(); DeserializationSettings.Converters.Add(new TransformationJsonConverter()); DeserializationSettings.Converters.Add(new CloudErrorJsonConverter()); diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs index 
f0bdfe3267bd..dfa465d8007b 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AvroWriteSettings.cs @@ -37,11 +37,20 @@ public AvroWriteSettings() /// which is required in AVRO spec. /// Record namespace in the write /// result. - public AvroWriteSettings(IDictionary additionalProperties = default(IDictionary), string recordName = default(string), string recordNamespace = default(string)) + /// Limit the written file's row count to + /// be smaller than or equal to the specified count. Type: integer (or + /// Expression with resultType integer). + /// Specifies the file name pattern + /// <fileNamePrefix>_<fileIndex>.<fileExtension> when + /// copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). + public AvroWriteSettings(IDictionary additionalProperties = default(IDictionary), string recordName = default(string), string recordNamespace = default(string), object maxRowsPerFile = default(object), object fileNamePrefix = default(object)) : base(additionalProperties) { RecordName = recordName; RecordNamespace = recordNamespace; + MaxRowsPerFile = maxRowsPerFile; + FileNamePrefix = fileNamePrefix; CustomInit(); } @@ -63,5 +72,22 @@ public AvroWriteSettings() [JsonProperty(PropertyName = "recordNamespace")] public string RecordNamespace { get; set; } + /// + /// Gets or sets limit the written file's row count to be smaller than + /// or equal to the specified count. Type: integer (or Expression with + /// resultType integer). + /// + [JsonProperty(PropertyName = "maxRowsPerFile")] + public object MaxRowsPerFile { get; set; } + + /// + /// Gets or sets specifies the file name pattern + /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; + /// when copy from non-file based store without partitionOptions. 
Type: + /// string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "fileNamePrefix")] + public object FileNamePrefix { get; set; } + } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs new file mode 100644 index 000000000000..88ce47ca4f48 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeDataset.cs @@ -0,0 +1,97 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Microsoft.Rest.Serialization; + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Azure Databricks Delta Lake dataset. + /// + [Rest.Serialization.JsonTransformation] + public partial class AzureDatabricksDeltaLakeDataset : Dataset + { + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset + /// class. + /// + public AzureDatabricksDeltaLakeDataset() + { + LinkedServiceName = new LinkedServiceReference(); + CustomInit(); + } + + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeDataset + /// class. + /// + /// Linked service reference. + /// Unmatched properties from the + /// message are deserialized this collection + /// Dataset description. + /// Columns that define the structure of the + /// dataset. Type: array (or Expression with resultType array), + /// itemType: DatasetDataElement. 
+ /// Columns that define the physical type schema + /// of the dataset. Type: array (or Expression with resultType array), + /// itemType: DatasetSchemaDataElement. + /// Parameters for dataset. + /// List of tags that can be used for + /// describing the Dataset. + /// The folder that this Dataset is in. If not + /// specified, Dataset will appear at the root level. + /// The name of delta table. Type: string (or + /// Expression with resultType string). + /// The database name of delta table. Type: + /// string (or Expression with resultType string). + public AzureDatabricksDeltaLakeDataset(LinkedServiceReference linkedServiceName, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder), object table = default(object), object database = default(object)) + : base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder) + { + Table = table; + Database = database; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the name of delta table. Type: string (or Expression + /// with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.table")] + public object Table { get; set; } + + /// + /// Gets or sets the database name of delta table. Type: string (or + /// Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.database")] + public object Database { get; set; } + + /// + /// Validate the object. 
+ /// + /// + /// Thrown if validation fails + /// + public override void Validate() + { + base.Validate(); + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs new file mode 100644 index 000000000000..2a95efec6934 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeExportCommand.cs @@ -0,0 +1,74 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Azure Databricks Delta Lake export command settings. + /// + public partial class AzureDatabricksDeltaLakeExportCommand : ExportSettings + { + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeExportCommand class. + /// + public AzureDatabricksDeltaLakeExportCommand() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeExportCommand class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Specify the date format for the csv in + /// Azure Databricks Delta Lake Copy. Type: string (or Expression with + /// resultType string). + /// Specify the timestamp format for the + /// csv in Azure Databricks Delta Lake Copy. Type: string (or + /// Expression with resultType string). 
+ public AzureDatabricksDeltaLakeExportCommand(IDictionary additionalProperties = default(IDictionary), object dateFormat = default(object), object timestampFormat = default(object)) + : base(additionalProperties) + { + DateFormat = dateFormat; + TimestampFormat = timestampFormat; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets specify the date format for the csv in Azure + /// Databricks Delta Lake Copy. Type: string (or Expression with + /// resultType string). + /// + [JsonProperty(PropertyName = "dateFormat")] + public object DateFormat { get; set; } + + /// + /// Gets or sets specify the timestamp format for the csv in Azure + /// Databricks Delta Lake Copy. Type: string (or Expression with + /// resultType string). + /// + [JsonProperty(PropertyName = "timestampFormat")] + public object TimestampFormat { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs new file mode 100644 index 000000000000..79e29fe8232d --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeImportCommand.cs @@ -0,0 +1,74 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Azure Databricks Delta Lake import command settings. 
+ /// + public partial class AzureDatabricksDeltaLakeImportCommand : ImportSettings + { + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeImportCommand class. + /// + public AzureDatabricksDeltaLakeImportCommand() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeImportCommand class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Specify the date format for csv in Azure + /// Databricks Delta Lake Copy. Type: string (or Expression with + /// resultType string). + /// Specify the timestamp format for csv + /// in Azure Databricks Delta Lake Copy. Type: string (or Expression + /// with resultType string). + public AzureDatabricksDeltaLakeImportCommand(IDictionary additionalProperties = default(IDictionary), object dateFormat = default(object), object timestampFormat = default(object)) + : base(additionalProperties) + { + DateFormat = dateFormat; + TimestampFormat = timestampFormat; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets specify the date format for csv in Azure Databricks + /// Delta Lake Copy. Type: string (or Expression with resultType + /// string). + /// + [JsonProperty(PropertyName = "dateFormat")] + public object DateFormat { get; set; } + + /// + /// Gets or sets specify the timestamp format for csv in Azure + /// Databricks Delta Lake Copy. Type: string (or Expression with + /// resultType string). 
+ /// + [JsonProperty(PropertyName = "timestampFormat")] + public object TimestampFormat { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs new file mode 100644 index 000000000000..ec88ac66a5ac --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeLinkedService.cs @@ -0,0 +1,123 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Microsoft.Rest.Serialization; + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Azure Databricks Delta Lake linked service. + /// + [Newtonsoft.Json.JsonObject("AzureDatabricksDeltaLake")] + [Rest.Serialization.JsonTransformation] + public partial class AzureDatabricksDeltaLakeLinkedService : LinkedService + { + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeLinkedService class. + /// + public AzureDatabricksDeltaLakeLinkedService() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the + /// AzureDatabricksDeltaLakeLinkedService class. + /// + /// <REGION>.azuredatabricks.net, domain + /// name of your Databricks deployment. Type: string (or Expression + /// with resultType string). + /// Unmatched properties from the + /// message are deserialized this collection + /// The integration runtime reference. + /// Linked service description. + /// Parameters for linked service. 
+ /// List of tags that can be used for + /// describing the linked service. + /// Access token for databricks REST API. + /// Refer to + /// https://docs.azuredatabricks.net/api/latest/authentication.html. + /// Type: string, SecureString or AzureKeyVaultSecretReference. + /// The id of an existing interactive cluster + /// that will be used for all runs of this job. Type: string (or + /// Expression with resultType string). + /// The encrypted credential used for + /// authentication. Credentials are encrypted using the integration + /// runtime credential manager. Type: string (or Expression with + /// resultType string). + public AzureDatabricksDeltaLakeLinkedService(object domain, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), SecretBase accessToken = default(SecretBase), object clusterId = default(object), object encryptedCredential = default(object)) + : base(additionalProperties, connectVia, description, parameters, annotations) + { + Domain = domain; + AccessToken = accessToken; + ClusterId = clusterId; + EncryptedCredential = encryptedCredential; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets &lt;REGION&gt;.azuredatabricks.net, domain + /// name of your Databricks deployment. Type: string (or Expression + /// with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.domain")] + public object Domain { get; set; } + + /// + /// Gets or sets access token for databricks REST API. Refer to + /// https://docs.azuredatabricks.net/api/latest/authentication.html. + /// Type: string, SecureString or AzureKeyVaultSecretReference. 
+ /// + [JsonProperty(PropertyName = "typeProperties.accessToken")] + public SecretBase AccessToken { get; set; } + + /// + /// Gets or sets the id of an existing interactive cluster that will be + /// used for all runs of this job. Type: string (or Expression with + /// resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.clusterId")] + public object ClusterId { get; set; } + + /// + /// Gets or sets the encrypted credential used for authentication. + /// Credentials are encrypted using the integration runtime credential + /// manager. Type: string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.encryptedCredential")] + public object EncryptedCredential { get; set; } + + /// + /// Validate the object. + /// + /// + /// Thrown if validation fails + /// + public override void Validate() + { + base.Validate(); + if (Domain == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "Domain"); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs new file mode 100644 index 000000000000..7df2d3a1dd14 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSink.cs @@ -0,0 +1,82 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// A copy activity Azure Databricks Delta Lake sink. 
+ /// + public partial class AzureDatabricksDeltaLakeSink : CopySink + { + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeSink + /// class. + /// + public AzureDatabricksDeltaLakeSink() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeSink + /// class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Write batch size. Type: integer (or + /// Expression with resultType integer), minimum: 0. + /// Write batch timeout. Type: string + /// (or Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// Sink retry count. Type: integer (or + /// Expression with resultType integer). + /// Sink retry wait. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent + /// connection count for the sink data store. Type: integer (or + /// Expression with resultType integer). + /// SQL pre-copy script. Type: string (or + /// Expression with resultType string). + /// Azure Databricks Delta Lake import + /// settings. 
+ public AzureDatabricksDeltaLakeSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object preCopyScript = default(object), AzureDatabricksDeltaLakeImportCommand importSettings = default(AzureDatabricksDeltaLakeImportCommand)) + : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections) + { + PreCopyScript = preCopyScript; + ImportSettings = importSettings; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets SQL pre-copy script. Type: string (or Expression with + /// resultType string). + /// + [JsonProperty(PropertyName = "preCopyScript")] + public object PreCopyScript { get; set; } + + /// + /// Gets or sets azure Databricks Delta Lake import settings. + /// + [JsonProperty(PropertyName = "importSettings")] + public AzureDatabricksDeltaLakeImportCommand ImportSettings { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs new file mode 100644 index 000000000000..55f52dbb477d --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksDeltaLakeSource.cs @@ -0,0 +1,77 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. 
+// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// A copy activity Azure Databricks Delta Lake source. + /// + public partial class AzureDatabricksDeltaLakeSource : CopySource + { + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeSource + /// class. + /// + public AzureDatabricksDeltaLakeSource() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the AzureDatabricksDeltaLakeSource + /// class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Source retry count. Type: integer + /// (or Expression with resultType integer). + /// Source retry wait. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent + /// connection count for the source data store. Type: integer (or + /// Expression with resultType integer). + /// Azure Databricks Delta Lake Sql query. Type: + /// string (or Expression with resultType string). + /// Azure Databricks Delta Lake export + /// settings. + public AzureDatabricksDeltaLakeSource(IDictionary additionalProperties = default(IDictionary), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object), AzureDatabricksDeltaLakeExportCommand exportSettings = default(AzureDatabricksDeltaLakeExportCommand)) + : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections) + { + Query = query; + ExportSettings = exportSettings; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets azure Databricks Delta Lake Sql query. Type: string + /// (or Expression with resultType string). 
+ /// + [JsonProperty(PropertyName = "query")] + public object Query { get; set; } + + /// + /// Gets or sets azure Databricks Delta Lake export settings. + /// + [JsonProperty(PropertyName = "exportSettings")] + public AzureDatabricksDeltaLakeExportCommand ExportSettings { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs similarity index 86% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs index 42805261ca0e..01cc5d50b1bb 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyBehaviorType.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyBehaviorType.cs @@ -1,10 +1,12 @@ +// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // -// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0 +// Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. 
+// namespace Microsoft.Azure.Management.DataFactory.Models { diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyTranslator.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyTranslator.cs similarity index 100% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/CopyTranslator.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyTranslator.cs diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs similarity index 90% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs index 3a0df0cf6014..d6871725ed8b 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DatasetDataElement.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetDataElement.cs @@ -1,16 +1,15 @@ +// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // -// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0 +// Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. 
+// namespace Microsoft.Azure.Management.DataFactory.Models { - using Microsoft.Azure; - using Microsoft.Azure.Management; - using Microsoft.Azure.Management.DataFactory; using Newtonsoft.Json; using System.Linq; @@ -24,7 +23,7 @@ public partial class DatasetDataElement /// public DatasetDataElement() { - CustomInit(); + CustomInit(); } /// diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs new file mode 100644 index 000000000000..b79a156e2b66 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DatasetSchemaDataElement.cs @@ -0,0 +1,75 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Columns that define the physical type schema of the dataset. + /// + public partial class DatasetSchemaDataElement + { + /// + /// Initializes a new instance of the DatasetSchemaDataElement class. + /// + public DatasetSchemaDataElement() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the DatasetSchemaDataElement class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Name of the schema column. Type: string (or + /// Expression with resultType string). + /// Type of the schema column. Type: string (or + /// Expression with resultType string). 
+ public DatasetSchemaDataElement(IDictionary additionalProperties = default(IDictionary), object name = default(object), object type = default(object)) + { + AdditionalProperties = additionalProperties; + Name = name; + Type = type; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets unmatched properties from the message are deserialized + /// this collection + /// + [JsonExtensionData] + public IDictionary AdditionalProperties { get; set; } + + /// + /// Gets or sets name of the schema column. Type: string (or Expression + /// with resultType string). + /// + [JsonProperty(PropertyName = "name")] + public object Name { get; set; } + + /// + /// Gets or sets type of the schema column. Type: string (or Expression + /// with resultType string). + /// + [JsonProperty(PropertyName = "type")] + public object Type { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs index a54f9c2e64bb..31e6a7d966ef 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DelimitedTextWriteSettings.cs @@ -39,11 +39,20 @@ public DelimitedTextWriteSettings() /// Indicates whether string values should /// always be enclosed with quotes. Type: boolean (or Expression with /// resultType boolean). - public DelimitedTextWriteSettings(object fileExtension, IDictionary additionalProperties = default(IDictionary), object quoteAllText = default(object)) + /// Limit the written file's row count to + /// be smaller than or equal to the specified count. Type: integer (or + /// Expression with resultType integer). 
+ /// Specifies the file name pattern + /// <fileNamePrefix>_<fileIndex>.<fileExtension> when + /// copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). + public DelimitedTextWriteSettings(object fileExtension, IDictionary additionalProperties = default(IDictionary), object quoteAllText = default(object), object maxRowsPerFile = default(object), object fileNamePrefix = default(object)) : base(additionalProperties) { QuoteAllText = quoteAllText; FileExtension = fileExtension; + MaxRowsPerFile = maxRowsPerFile; + FileNamePrefix = fileNamePrefix; CustomInit(); } @@ -67,6 +76,23 @@ public DelimitedTextWriteSettings() [JsonProperty(PropertyName = "fileExtension")] public object FileExtension { get; set; } + /// + /// Gets or sets limit the written file's row count to be smaller than + /// or equal to the specified count. Type: integer (or Expression with + /// resultType integer). + /// + [JsonProperty(PropertyName = "maxRowsPerFile")] + public object MaxRowsPerFile { get; set; } + + /// + /// Gets or sets specifies the file name pattern + /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; + /// when copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "fileNamePrefix")] + public object FileNamePrefix { get; set; } + /// /// Validate the object. 
/// diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs similarity index 83% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs index ab51de7a5277..6dda872f1716 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/DynamicsSinkWriteBehavior.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DynamicsSinkWriteBehavior.cs @@ -1,10 +1,12 @@ +// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // -// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0 +// Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. +// namespace Microsoft.Azure.Management.DataFactory.Models { diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs index 0eb07d896d9e..d90366477c64 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcSink.cs @@ -47,10 +47,12 @@ public OrcSink() /// connection count for the sink data store. Type: integer (or /// Expression with resultType integer). /// ORC store settings. 
- public OrcSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings)) + /// ORC format settings. + public OrcSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), OrcWriteSettings formatSettings = default(OrcWriteSettings)) : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections) { StoreSettings = storeSettings; + FormatSettings = formatSettings; CustomInit(); } @@ -65,5 +67,11 @@ public OrcSink() [JsonProperty(PropertyName = "storeSettings")] public StoreWriteSettings StoreSettings { get; set; } + /// + /// Gets or sets ORC format settings. + /// + [JsonProperty(PropertyName = "formatSettings")] + public OrcWriteSettings FormatSettings { get; set; } + } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs new file mode 100644 index 000000000000..3dd01eab7727 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcWriteSettings.cs @@ -0,0 +1,74 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. 
+// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Orc write settings. + /// + public partial class OrcWriteSettings : FormatWriteSettings + { + /// + /// Initializes a new instance of the OrcWriteSettings class. + /// + public OrcWriteSettings() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the OrcWriteSettings class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Limit the written file's row count to + /// be smaller than or equal to the specified count. Type: integer (or + /// Expression with resultType integer). + /// Specifies the file name pattern + /// <fileNamePrefix>_<fileIndex>.<fileExtension> when + /// copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). + public OrcWriteSettings(IDictionary additionalProperties = default(IDictionary), object maxRowsPerFile = default(object), object fileNamePrefix = default(object)) + : base(additionalProperties) + { + MaxRowsPerFile = maxRowsPerFile; + FileNamePrefix = fileNamePrefix; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets limit the written file's row count to be smaller than + /// or equal to the specified count. Type: integer (or Expression with + /// resultType integer). + /// + [JsonProperty(PropertyName = "maxRowsPerFile")] + public object MaxRowsPerFile { get; set; } + + /// + /// Gets or sets specifies the file name pattern + /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; + /// when copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). 
+ /// + [JsonProperty(PropertyName = "fileNamePrefix")] + public object FileNamePrefix { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs index 67b95379fe09..5873985de449 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetSink.cs @@ -47,10 +47,12 @@ public ParquetSink() /// connection count for the sink data store. Type: integer (or /// Expression with resultType integer). /// Parquet store settings. - public ParquetSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings)) + /// Parquet format settings. + public ParquetSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), StoreWriteSettings storeSettings = default(StoreWriteSettings), ParquetWriteSettings formatSettings = default(ParquetWriteSettings)) : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections) { StoreSettings = storeSettings; + FormatSettings = formatSettings; CustomInit(); } @@ -65,5 +67,11 @@ public ParquetSink() [JsonProperty(PropertyName = "storeSettings")] public StoreWriteSettings StoreSettings { get; set; } + /// + /// Gets or sets parquet format settings. 
+ /// + [JsonProperty(PropertyName = "formatSettings")] + public ParquetWriteSettings FormatSettings { get; set; } + } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs new file mode 100644 index 000000000000..5204159ed3aa --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ParquetWriteSettings.cs @@ -0,0 +1,74 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Parquet write settings. + /// + public partial class ParquetWriteSettings : FormatWriteSettings + { + /// + /// Initializes a new instance of the ParquetWriteSettings class. + /// + public ParquetWriteSettings() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the ParquetWriteSettings class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Limit the written file's row count to + /// be smaller than or equal to the specified count. Type: integer (or + /// Expression with resultType integer). + /// Specifies the file name pattern + /// <fileNamePrefix>_<fileIndex>.<fileExtension> when + /// copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). 
+ public ParquetWriteSettings(IDictionary additionalProperties = default(IDictionary), object maxRowsPerFile = default(object), object fileNamePrefix = default(object)) + : base(additionalProperties) + { + MaxRowsPerFile = maxRowsPerFile; + FileNamePrefix = fileNamePrefix; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets limit the written file's row count to be smaller than + /// or equal to the specified count. Type: integer (or Expression with + /// resultType integer). + /// + [JsonProperty(PropertyName = "maxRowsPerFile")] + public object MaxRowsPerFile { get; set; } + + /// + /// Gets or sets specifies the file name pattern + /// &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; + /// when copy from non-file based store without partitionOptions. Type: + /// string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "fileNamePrefix")] + public object FileNamePrefix { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs similarity index 56% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs index c15d50bf35d1..b7f33e452c19 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TabularTranslator.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TabularTranslator.cs @@ -10,8 +10,10 @@ namespace Microsoft.Azure.Management.DataFactory.Models { - using System.Collections.Generic; using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; /// /// A copy activity tabular 
translator. @@ -33,36 +35,41 @@ public TabularTranslator() /// message are deserialized this collection /// Column mappings. Example: "UserId: /// MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression - /// with resultType string). + /// with resultType string). This property will be retired. Please use + /// mappings property. /// The schema mapping to map between /// tabular data and hierarchical data. Example: {"Column1": /// "$.Column1", "Column2": "$.Column2.Property1", "Column3": /// "$.Column2.Property2"}. Type: object (or Expression with resultType - /// object). + /// object). This property will be retired. Please use mappings + /// property. /// The JSON Path of the Nested Array /// that is going to do cross-apply. Type: object (or Expression with /// resultType object). + /// Whether to map complex + /// (array and object) values to simple strings in json format. Type: + /// boolean (or Expression with resultType boolean). /// Column mappings with logical types. - /// Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\", - /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\" - /// String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\" - /// String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. - /// Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\", - /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}}, - /// {\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\": - /// {\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object - /// (or Expression with resultType object). - /// Whether to enable the advanced type conversion - /// feature in the Copy activity. Type: boolean (or Expression with resultType - /// boolean). 
- /// Type conversion settings - public TabularTranslator(IDictionary additionalProperties = default(IDictionary), object columnMappings = default(object), object schemaMapping = default(object), object collectionReference = default(object), object mappings = default(object), object typeConversion = default(object), TypeConversionSettings typeConversionSettings = default(TypeConversionSettings)) + /// Tabular->tabular example: + /// [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + /// Hierarchical->tabular example: + /// [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + /// Type: object (or Expression with resultType object). + /// Whether to enable the advanced type + /// conversion feature in the Copy activity. Type: boolean (or + /// Expression with resultType boolean). + /// Type conversion + /// settings + public TabularTranslator(IDictionary additionalProperties = default(IDictionary), object columnMappings = default(object), object schemaMapping = default(object), object collectionReference = default(object), object mapComplexValuesToString = default(object), object mappings = default(object), object typeConversion = default(object), TypeConversionSettings typeConversionSettings = default(TypeConversionSettings)) : base(additionalProperties) { ColumnMappings = columnMappings; SchemaMapping = schemaMapping; CollectionReference = collectionReference; + MapComplexValuesToString = mapComplexValuesToString; Mappings = mappings; + TypeConversion = typeConversion; + TypeConversionSettings = typeConversionSettings; CustomInit(); } @@ -72,9 +79,10 @@ public TabularTranslator() partial void CustomInit(); /// - /// Gets or sets column mappings. 
Example: "UserId: MyUserId, Group: + /// Gets or sets column mappings. Example: "UserId: MyUserId, Group: /// MyGroup, Name: MyName" Type: string (or Expression with resultType - /// string). + /// string). This property will be retired. Please use mappings + /// property. /// [JsonProperty(PropertyName = "columnMappings")] public object ColumnMappings { get; set; } @@ -83,7 +91,8 @@ public TabularTranslator() /// Gets or sets the schema mapping to map between tabular data and /// hierarchical data. Example: {"Column1": "$.Column1", "Column2": /// "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: - /// object (or Expression with resultType object). + /// object (or Expression with resultType object). This property will + /// be retired. Please use mappings property. /// [JsonProperty(PropertyName = "schemaMapping")] public object SchemaMapping { get; set; } @@ -96,31 +105,37 @@ public TabularTranslator() public object CollectionReference { get; set; } /// - /// Gets or sets the column mappings with logical types. - /// Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\", - /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\" - /// String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\" - /// String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. - /// Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\", - /// \"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}}, - /// {\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\": - /// {\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object - /// (or Expression with resultType object). + /// Gets or sets whether to map complex (array and object) values to + /// simple strings in json format. Type: boolean (or Expression with + /// resultType boolean). 
+ /// + [JsonProperty(PropertyName = "mapComplexValuesToString")] + public object MapComplexValuesToString { get; set; } + + /// + /// Gets or sets column mappings with logical types. + /// Tabular-&gt;tabular example: + /// [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + /// Hierarchical-&gt;tabular example: + /// [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + /// Type: object (or Expression with resultType object). /// [JsonProperty(PropertyName = "mappings")] public object Mappings { get; set; } /// - /// Gets or sets typeConversion indicating whether to enable the advanced type conversion - /// feature in the Copy activity. Type: boolean (or Expression with resultType boolean). + /// Gets or sets whether to enable the advanced type conversion feature + /// in the Copy activity. Type: boolean (or Expression with resultType + /// boolean). /// [JsonProperty(PropertyName = "typeConversion")] public object TypeConversion { get; set; } /// - /// Gets or sets typeConversionSettings. 
+ /// Gets or sets type conversion settings /// [JsonProperty(PropertyName = "typeConversionSettings")] public TypeConversionSettings TypeConversionSettings { get; set; } + } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs similarity index 59% rename from sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs rename to sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs index 02c57496faa8..57c7aa987dbe 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Extensions/TypeConversionSettings.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/TypeConversionSettings.cs @@ -1,4 +1,4 @@ -// +// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. @@ -8,12 +8,14 @@ // regenerated. // - namespace Microsoft.Azure.Management.DataFactory.Models { - using System.Collections.Generic; using Newtonsoft.Json; + using System.Linq; + /// + /// Type conversion settings + /// public partial class TypeConversionSettings { /// @@ -27,17 +29,25 @@ public TypeConversionSettings() /// /// Initializes a new instance of the TypeConversionSettings class. /// - /// Unmatched properties from the message are deserialized this collection - /// Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - /// Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - /// The format for DateTime values. Type: string (or Expression with resultType string). - /// The format for DateTimeOffset values. Type: string (or Expression with resultType string). - /// The format for TimeSpan values. 
Type: string (or Expression with resultType string). - /// The culture used to convert data from/to string. Type: string (or Expression with resultType string). - public TypeConversionSettings(IDictionary additionalProperties = default(IDictionary), object allowDataTruncation = default(object), object treatBooleanAsNumber = default(object), object dateTimeFormat = default(object), object dateTimeOffsetFormat = default(object), object timeSpanFormat = default(object), object culture = default(object)) + /// Whether to allow data truncation + /// when converting the data. Type: boolean (or Expression with + /// resultType boolean). + /// Whether to treat boolean values + /// as numbers. Type: boolean (or Expression with resultType + /// boolean). + /// The format for DateTime values. Type: + /// string (or Expression with resultType string). + /// The format for DateTimeOffset + /// values. Type: string (or Expression with resultType + /// string). + /// The format for TimeSpan values. Type: + /// string (or Expression with resultType string). + /// The culture used to convert data from/to + /// string. Type: string (or Expression with resultType + /// string). 
+ public TypeConversionSettings(object allowDataTruncation = default(object), object treatBooleanAsNumber = default(object), object dateTimeFormat = default(object), object dateTimeOffsetFormat = default(object), object timeSpanFormat = default(object), object culture = default(object)) { - AdditionalProperties = additionalProperties; - AllowDataTruncation = AllowDataTruncation; + AllowDataTruncation = allowDataTruncation; TreatBooleanAsNumber = treatBooleanAsNumber; DateTimeFormat = dateTimeFormat; DateTimeOffsetFormat = dateTimeOffsetFormat; @@ -52,46 +62,46 @@ public TypeConversionSettings() partial void CustomInit(); /// - /// Gets or sets unmatched properties from the message are deserialized - /// this collection - /// - [JsonExtensionData] - public IDictionary AdditionalProperties { get; set; } - - /// - /// Gets or sets allowDataTruncation indicating whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). + /// Gets or sets whether to allow data truncation when converting the + /// data. Type: boolean (or Expression with resultType boolean). /// [JsonProperty(PropertyName = "allowDataTruncation")] public object AllowDataTruncation { get; set; } /// - /// Gets or sets treatBooleanAsNumber indicating whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). + /// Gets or sets whether to treat boolean values as numbers. Type: + /// boolean (or Expression with resultType boolean). /// [JsonProperty(PropertyName = "treatBooleanAsNumber")] public object TreatBooleanAsNumber { get; set; } /// - /// Gets or sets the format for DateTime values. Type: string (or Expression with resultType string). + /// Gets or sets the format for DateTime values. Type: string (or + /// Expression with resultType string). /// [JsonProperty(PropertyName = "dateTimeFormat")] public object DateTimeFormat { get; set; } /// - /// Gets or sets the format for DateTimeOffset values. 
Type: string (or Expression with resultType string). + /// Gets or sets the format for DateTimeOffset values. Type: string (or + /// Expression with resultType string). /// [JsonProperty(PropertyName = "dateTimeOffsetFormat")] public object DateTimeOffsetFormat { get; set; } /// - /// Gets or sets the format for TimeSpan values. Type: string (or Expression with resultType string). + /// Gets or sets the format for TimeSpan values. Type: string (or + /// Expression with resultType string). /// [JsonProperty(PropertyName = "timeSpanFormat")] public object TimeSpanFormat { get; set; } /// - /// Gets or sets the culture used to convert data from/to string. Type: string (or Expression with resultType string). + /// Gets or sets the culture used to convert data from/to string. Type: + /// string (or Expression with resultType string). /// [JsonProperty(PropertyName = "culture")] public object Culture { get; set; } + } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs index c15c9ea7d5b4..2011a9871690 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/SdkInfo_DataFactoryManagementClient.cs @@ -29,11 +29,14 @@ public static IEnumerable> ApiInfo_DataFactoryMana new Tuple("DataFactory", "IntegrationRuntimeObjectMetadata", "2018-06-01"), new Tuple("DataFactory", "IntegrationRuntimes", "2018-06-01"), new Tuple("DataFactory", "LinkedServices", "2018-06-01"), + new Tuple("DataFactory", "ManagedPrivateEndpoints", "2018-06-01"), + new Tuple("DataFactory", "ManagedVirtualNetworks", "2018-06-01"), new Tuple("DataFactory", "Operations", "2018-06-01"), new Tuple("DataFactory", "PipelineRuns", "2018-06-01"), new Tuple("DataFactory", "Pipelines", "2018-06-01"), new 
Tuple("DataFactory", "TriggerRuns", "2018-06-01"), new Tuple("DataFactory", "Triggers", "2018-06-01"), + new Tuple("DataFactory", "managedPrivateEndpoints", "2018-06-01"), }.AsEnumerable(); } } @@ -43,7 +46,7 @@ public static IEnumerable> ApiInfo_DataFactoryMana public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\\Projects\\azure-sdk-for-net\\sdk"; public static readonly String GithubForkName = "Azure"; public static readonly String GithubBranchName = "master"; - public static readonly String GithubCommidId = "b8630cc7b5869fbb764eeca3a618b23141e612db"; + public static readonly String GithubCommidId = "fce3400431eff281bddd04bed9727e63765b8da0"; public static readonly String CodeGenerationErrors = ""; public static readonly String GithubRepoName = "azure-rest-api-specs"; // END: Code Generation Metadata Section diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index fdd9b23a1107..faa5abee941f 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -12,6 +12,8 @@ diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs index d3d6cda306b0..caae96128822 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs 
@@ -45,6 +45,27 @@ public class DatasetJsonSamples : JsonSampleCollection } }"; + [JsonSample] + public const string AzureDatabricksDeltaLakeTable = @" +{ + name: ""AzureDatabricksDeltaLakeDataset"", + properties: + { + type: ""AzureDatabricksDeltaLakeDataset"", + linkedServiceName: + { + referenceName : ""ls"", + type : ""LinkedServiceReference"" + }, + typeProperties: + { + ""table"": ""test"", + ""database"": ""default"" + } + } +} +"; + [JsonSample] public const string AzureTable = @" { diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs index fc970af02d2c..b1ebc369f2c3 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -32,6 +32,25 @@ public class LinkedServiceJsonSamples : JsonSampleCollection { + [JsonSample] + public const string AzureDatabricksDeltaLakeCopyActivity = @" +{ + ""name"": ""ExampleCopyActivity"", + ""properties"": { + ""activities"": [ + { + ""name"": ""MyActivity"", + ""type"": ""Copy"", + ""typeProperties"": { + ""source"": { + ""type"": ""AzureDatabricksDeltaLakeSource"", + ""query"": ""abc"", + ""exportSettings"": { + ""type"": ""AzureDatabricksDeltaLakeExportCommand"", + ""dateFormat"": ""xxx"", + ""timestampFormat"": ""xxx"" + } + }, + ""sink"": { + ""type"": ""AzureDatabricksDeltaLakeSink"", + ""preCopyScript"": ""123"", + ""importSettings"": { + ""type"": ""AzureDatabricksDeltaLakeImportCommand"", + ""dateFormat"": ""xxx"", + ""timestampFormat"": ""xxx"" + } + } + }, + ""inputs"": [ + { + ""referenceName"": ""exampleSourceDataset"", + ""type"": ""DatasetReference"" + } + ], + ""outputs"": [ + { + ""referenceName"": ""exampleSinkDataset"", + ""type"": ""DatasetReference"" + } + ] + } + ] + } +} +"; + 
[JsonSample] public const string CopyActivity = @" { From 39b62ef0e3df601d1bfd04bb08c77863a4ac6f8d Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Mon, 21 Sep 2020 15:53:04 +0800 Subject: [PATCH 2/9] Add a test --- .../tests/JsonSamples/PipelineJsonSamples.cs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs index 62e9ce570ee0..7d22cbbc6290 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs @@ -4421,7 +4421,9 @@ public class PipelineJsonSamples : JsonSampleCollection ""formatSettings"": { ""type"": ""DelimitedTextWriteSettings"", ""quoteAllText"": true, - ""fileExtension"": "".csv"" + ""fileExtension"": "".csv"", + ""maxRowsPerFile"":10, + ""fileNamePrefix"": ""orcSinkFile"" } }, ""validateDataConsistency"": true, From 6ab7562d6fa812718f2fef8564954922f8aa0bcb Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Mon, 21 Sep 2020 15:53:29 +0800 Subject: [PATCH 3/9] add a space --- .../tests/JsonSamples/PipelineJsonSamples.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs index 7d22cbbc6290..9c4be5ece212 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs @@ -4422,7 +4422,7 @@ public class PipelineJsonSamples : JsonSampleCollection ""type"": ""DelimitedTextWriteSettings"", ""quoteAllText"": true, ""fileExtension"": "".csv"", - 
""maxRowsPerFile"":10, + ""maxRowsPerFile"": 10, ""fileNamePrefix"": ""orcSinkFile"" } }, From 8ff6dca12076858f8ae26418ed99455098ad8911 Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Wed, 23 Sep 2020 17:39:40 +0800 Subject: [PATCH 4/9] [DataFactory]Update Rest Sink properties --- .../datafactory_resource-manager.txt | 4 +-- .../src/CHANGELOG.md | 1 + .../src/Generated/Models/RestSink.cs | 33 ++++++------------- ...rosoft.Azure.Management.DataFactory.csproj | 1 + .../tests/JsonSamples/PipelineJsonSamples.cs | 3 +- 5 files changed, 15 insertions(+), 27 deletions(-) diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 88213a692e47..4d36e20ea352 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-09-21 07:26:19 UTC +2020-09-23 09:36:41 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: fce3400431eff281bddd04bed9727e63765b8da0 +Commit: bfc3af3be11659794186df73982a11d3c84ec05a AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index bc38aa927cf9..f35ab856d6c1 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -6,6 +6,7 @@ - Support Tar GZip compression type in Data Factory - Added maxRowPerFile/fileNamePrefix to tabular format 
settings - Added support for AzureDatabricks delta lake +- Update Rest Sink properties ## Version 4.11.0 ### Feature Additions diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs index eb28aa93e384..9267ee56d7cc 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs @@ -59,22 +59,17 @@ public RestSink() /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). /// The time to await before sending next /// request, in milliseconds - /// Compression Type to Send data in - /// compressed format with Optimal Compression Level, Default is None. - /// And The Only Supported option is Gzip. - /// Wraps Request Array Json - /// into an Object before calling the rest endpoint , Default is false. - /// ex: if true request content sample format is { rows:[]} else the - /// format is [] - public RestSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object requestMethod = default(object), object additionalHeaders = default(object), object httpRequestTimeout = default(object), object requestInterval = default(object), object compressionType = default(object), object wrapRequestJsonInAnObject = default(object)) + /// Http Compression Type to Send + /// data in compressed format with Optimal Compression Level, Default + /// is None. And The Only Supported option is Gzip. 
+ public RestSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object requestMethod = default(object), object additionalHeaders = default(object), object httpRequestTimeout = default(object), object requestInterval = default(object), object httpCompressionType = default(object)) : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections) { RequestMethod = requestMethod; AdditionalHeaders = additionalHeaders; HttpRequestTimeout = httpRequestTimeout; RequestInterval = requestInterval; - CompressionType = compressionType; - WrapRequestJsonInAnObject = wrapRequestJsonInAnObject; + HttpCompressionType = httpCompressionType; CustomInit(); } @@ -116,20 +111,12 @@ public RestSink() public object RequestInterval { get; set; } /// - /// Gets or sets compression Type to Send data in compressed format - /// with Optimal Compression Level, Default is None. And The Only - /// Supported option is Gzip. + /// Gets or sets http Compression Type to Send data in compressed + /// format with Optimal Compression Level, Default is None. And The + /// Only Supported option is Gzip. /// - [JsonProperty(PropertyName = "compressionType")] - public object CompressionType { get; set; } - - /// - /// Gets or sets wraps Request Array Json into an Object before calling - /// the rest endpoint , Default is false. 
ex: if true request content - /// sample format is { rows:[]} else the format is [] - /// - [JsonProperty(PropertyName = "wrapRequestJsonInAnObject")] - public object WrapRequestJsonInAnObject { get; set; } + [JsonProperty(PropertyName = "httpCompressionType")] + public object HttpCompressionType { get; set; } } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index faa5abee941f..230944aa27fa 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -14,6 +14,7 @@ - Support Tar GZip compression type in Data Factory - Added maxRowPerFile/fileNamePrefix to tabular format settings - Added support for AzureDatabricks delta lake +- Update Rest Sink properties ]]> diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs index 9c4be5ece212..4e0056e7ab9e 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/PipelineJsonSamples.cs @@ -6849,8 +6849,7 @@ public class PipelineJsonSamples : JsonSampleCollection }, writeBatchSize: 1000, writeBatchTimeout: ""01:00:00"", - compressionType: ""gzip"", - wrapRequestJsonInAnObject: true, + httpCompressionType: ""gzip"" }, translator: { From 0390a63142e66abae6c9768c81e4bf96bcf3ca04 Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Sun, 27 Sep 2020 13:20:33 +0800 Subject: [PATCH 5/9] Add MongoDbAtlas Source Connector in DataFactory --- .../datafactory_resource-manager.txt | 4 +- .../src/CHANGELOG.md | 2 + 
.../Models/MongoDbAtlasCollectionDataset.cs | 93 ++++++++++++++ .../Models/MongoDbAtlasLinkedService.cs | 98 ++++++++++++++ .../Generated/Models/MongoDbAtlasSource.cs | 121 ++++++++++++++++++ .../Generated/Models/OrcCompressionCodec.cs | 1 + .../src/Generated/Models/OrcDataset.cs | 5 +- ...rosoft.Azure.Management.DataFactory.csproj | 2 + .../tests/JsonSamples/DatasetJsonSamples.cs | 57 +++++++++ .../JsonSamples/LinkedServiceJsonSamples.cs | 18 +++ .../tests/JsonSamples/PipelineJsonSamples.cs | 58 +++++++++ 11 files changed, 455 insertions(+), 4 deletions(-) create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasCollectionDataset.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasLinkedService.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasSource.cs diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 4d36e20ea352..42ab4a81e5ad 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-09-23 09:36:41 UTC +2020-09-27 01:55:09 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: bfc3af3be11659794186df73982a11d3c84ec05a +Commit: 7d9eaf4c9866664806a2202b02e3ab5259c86cf9 AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md 
b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index f35ab856d6c1..9a04b204b8e1 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -7,6 +7,8 @@ - Added maxRowPerFile/fileNamePrefix to tabular format settings - Added support for AzureDatabricks delta lake - Update Rest Sink properties +- Added support lzo in orc format +- Added MongoDbAtlas Source Connector in DataFactory ## Version 4.11.0 ### Feature Additions diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasCollectionDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasCollectionDataset.cs new file mode 100644 index 000000000000..538a4f2bd585 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasCollectionDataset.cs @@ -0,0 +1,93 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Microsoft.Rest.Serialization; + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// The MongoDB Atlas database dataset. + /// + [Newtonsoft.Json.JsonObject("MongoDbAtlasCollection")] + [Rest.Serialization.JsonTransformation] + public partial class MongoDbAtlasCollectionDataset : Dataset + { + /// + /// Initializes a new instance of the MongoDbAtlasCollectionDataset + /// class. 
+ /// + public MongoDbAtlasCollectionDataset() + { + LinkedServiceName = new LinkedServiceReference(); + CustomInit(); + } + + /// + /// Initializes a new instance of the MongoDbAtlasCollectionDataset + /// class. + /// + /// Linked service reference. + /// The collection name of the MongoDB Atlas + /// database. Type: string (or Expression with resultType + /// string). + /// Unmatched properties from the + /// message are deserialized this collection + /// Dataset description. + /// Columns that define the structure of the + /// dataset. Type: array (or Expression with resultType array), + /// itemType: DatasetDataElement. + /// Columns that define the physical type schema + /// of the dataset. Type: array (or Expression with resultType array), + /// itemType: DatasetSchemaDataElement. + /// Parameters for dataset. + /// List of tags that can be used for + /// describing the Dataset. + /// The folder that this Dataset is in. If not + /// specified, Dataset will appear at the root level. + public MongoDbAtlasCollectionDataset(LinkedServiceReference linkedServiceName, object collection, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder)) + : base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder) + { + Collection = collection; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the collection name of the MongoDB Atlas database. + /// Type: string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.collection")] + public object Collection { get; set; } + + /// + /// Validate the object. 
+ /// + /// + /// Thrown if validation fails + /// + public override void Validate() + { + base.Validate(); + if (Collection == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "Collection"); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasLinkedService.cs new file mode 100644 index 000000000000..b57c7c28c6ce --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasLinkedService.cs @@ -0,0 +1,98 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Microsoft.Rest.Serialization; + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// Linked service for MongoDB Atlas data source. + /// + [Newtonsoft.Json.JsonObject("MongoDbAtlas")] + [Rest.Serialization.JsonTransformation] + public partial class MongoDbAtlasLinkedService : LinkedService + { + /// + /// Initializes a new instance of the MongoDbAtlasLinkedService class. + /// + public MongoDbAtlasLinkedService() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the MongoDbAtlasLinkedService class. + /// + /// The MongoDB Atlas connection string. + /// Type: string, SecureString or AzureKeyVaultSecretReference. Type: + /// string, SecureString or AzureKeyVaultSecretReference. + /// The name of the MongoDB Atlas database that + /// you want to access. Type: string (or Expression with resultType + /// string). 
+ /// Unmatched properties from the + /// message are deserialized this collection + /// The integration runtime reference. + /// Linked service description. + /// Parameters for linked service. + /// List of tags that can be used for + /// describing the linked service. + public MongoDbAtlasLinkedService(object connectionString, object database, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList)) + : base(additionalProperties, connectVia, description, parameters, annotations) + { + ConnectionString = connectionString; + Database = database; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the MongoDB Atlas connection string. Type: string, + /// SecureString or AzureKeyVaultSecretReference. Type: string, + /// SecureString or AzureKeyVaultSecretReference. + /// + [JsonProperty(PropertyName = "typeProperties.connectionString")] + public object ConnectionString { get; set; } + + /// + /// Gets or sets the name of the MongoDB Atlas database that you want + /// to access. Type: string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.database")] + public object Database { get; set; } + + /// + /// Validate the object. 
+ /// + /// + /// Thrown if validation fails + /// + public override void Validate() + { + base.Validate(); + if (ConnectionString == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "ConnectionString"); + } + if (Database == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "Database"); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasSource.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasSource.cs new file mode 100644 index 000000000000..1b1e8eb67779 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/MongoDbAtlasSource.cs @@ -0,0 +1,121 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// A copy activity source for a MongoDB Atlas database. + /// + public partial class MongoDbAtlasSource : CopySource + { + /// + /// Initializes a new instance of the MongoDbAtlasSource class. + /// + public MongoDbAtlasSource() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the MongoDbAtlasSource class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Source retry count. Type: integer + /// (or Expression with resultType integer). + /// Source retry wait. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent + /// connection count for the source data store. 
Type: integer (or + /// Expression with resultType integer). + /// Specifies selection filter using query + /// operators. To return all documents in a collection, omit this + /// parameter or pass an empty document ({}). Type: string (or + /// Expression with resultType string). + /// Cursor methods for Mongodb + /// query + /// Specifies the number of documents to return + /// in each batch of the response from MongoDB Atlas instance. In most + /// cases, modifying the batch size will not affect the user or the + /// application. This property's main purpose is to avoid hit the + /// limitation of response size. Type: integer (or Expression with + /// resultType integer). + /// Query timeout. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// Specifies the additional columns to + /// be added to source data. Type: array of objects (or Expression with + /// resultType array of objects). + public MongoDbAtlasSource(IDictionary additionalProperties = default(IDictionary), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object filter = default(object), MongoDbCursorMethodsProperties cursorMethods = default(MongoDbCursorMethodsProperties), object batchSize = default(object), object queryTimeout = default(object), IList additionalColumns = default(IList)) + : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections) + { + Filter = filter; + CursorMethods = cursorMethods; + BatchSize = batchSize; + QueryTimeout = queryTimeout; + AdditionalColumns = additionalColumns; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets specifies selection filter using query operators. 
To + /// return all documents in a collection, omit this parameter or pass + /// an empty document ({}). Type: string (or Expression with resultType + /// string). + /// + [JsonProperty(PropertyName = "filter")] + public object Filter { get; set; } + + /// + /// Gets or sets cursor methods for Mongodb query + /// + [JsonProperty(PropertyName = "cursorMethods")] + public MongoDbCursorMethodsProperties CursorMethods { get; set; } + + /// + /// Gets or sets specifies the number of documents to return in each + /// batch of the response from MongoDB Atlas instance. In most cases, + /// modifying the batch size will not affect the user or the + /// application. This property's main purpose is to avoid hit the + /// limitation of response size. Type: integer (or Expression with + /// resultType integer). + /// + [JsonProperty(PropertyName = "batchSize")] + public object BatchSize { get; set; } + + /// + /// Gets or sets query timeout. Type: string (or Expression with + /// resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// + [JsonProperty(PropertyName = "queryTimeout")] + public object QueryTimeout { get; set; } + + /// + /// Gets or sets specifies the additional columns to be added to source + /// data. Type: array of objects (or Expression with resultType array + /// of objects). 
+ /// + [JsonProperty(PropertyName = "additionalColumns")] + public IList AdditionalColumns { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcCompressionCodec.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcCompressionCodec.cs index 4c34dd3a5833..39efad59247e 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcCompressionCodec.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcCompressionCodec.cs @@ -19,5 +19,6 @@ public static class OrcCompressionCodec public const string None = "none"; public const string Zlib = "zlib"; public const string Snappy = "snappy"; + public const string Lzo = "lzo"; } } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcDataset.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcDataset.cs index 776f3b0a59eb..8a368a17b352 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcDataset.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/OrcDataset.cs @@ -54,7 +54,7 @@ public OrcDataset() /// The folder that this Dataset is in. If not /// specified, Dataset will appear at the root level. 
/// Possible values include: 'none', - /// 'zlib', 'snappy' + /// 'zlib', 'snappy', 'lzo' public OrcDataset(LinkedServiceReference linkedServiceName, DatasetLocation location, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList annotations = default(IList), DatasetFolder folder = default(DatasetFolder), string orcCompressionCodec = default(string)) : base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder) { @@ -75,7 +75,8 @@ public OrcDataset() public DatasetLocation Location { get; set; } /// - /// Gets or sets possible values include: 'none', 'zlib', 'snappy' + /// Gets or sets possible values include: 'none', 'zlib', 'snappy', + /// 'lzo' /// [JsonProperty(PropertyName = "typeProperties.orcCompressionCodec")] public string OrcCompressionCodec { get; set; } diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index 230944aa27fa..99f54f60339c 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -15,6 +15,8 @@ - Added maxRowPerFile/fileNamePrefix to tabular format settings - Added support for AzureDatabricks delta lake - Update Rest Sink properties +- Added support lzo in orc format +- Added MongoDbAtlas Source Connector in DataFactory ]]> diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs index caae96128822..28d3dcece794 100644 --- 
a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DatasetJsonSamples.cs @@ -361,6 +361,29 @@ public class DatasetJsonSamples : JsonSampleCollection } } } +"; + + [JsonSample] + public const string MongoDbAtlasCollection = @" +{ + name: ""MongoDbAtlasDbTable"", + properties: { + type: ""MongoDbAtlasCollection"", + linkedServiceName: + { + referenceName : ""ls"", + type : ""LinkedServiceReference"" + }, + typeProperties: { + collection: ""fake table"" + }, + parameters: { + MyCollection: { + type: ""String"" + } + } + } +} "; [JsonSample] @@ -1339,6 +1362,40 @@ public class DatasetJsonSamples : JsonSampleCollection } }"; + [JsonSample] + public const string OrcDatasetWithlzoCompressionCodec = @" +{ + ""name"": ""OrcDataset"", + ""properties"": { + ""type"": ""Orc"", + ""linkedServiceName"": { + ""referenceName"": ""AzureBlobStorageLinkedService"", + ""type"": ""LinkedServiceReference"" + }, + ""typeProperties"": { + ""location"": { + ""type"": ""AzureBlobStorageLocation"", + ""container"": ""ContainerName"", + ""folderPath"": ""dataflow/test/input"", + ""fileName"": ""data.orc"" + }, + ""orcCompressionCodec"": ""lzo"" + }, + ""schema"": [ + { + ""name"": ""col1"", + ""type"": ""INT_32"" + }, + { + ""name"": ""col2"", + ""type"": ""Decimal"", + ""precision"": ""38"", + ""scale"": ""2"" + } + ] + } +}"; + [JsonSample] public const string TeradataDataset = @" { diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs index b1ebc369f2c3..ed3f199d3e4b 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -2025,6 +2025,24 @@ 
public class LinkedServiceJsonSamples : JsonSampleCollection ] } } +"; + + [JsonSample] + public const string MongoDbAtlasCopyActivity = @" +{ + ""name"": ""ExampleCopyActivity"", + ""properties"": { + ""activities"": [ + { + ""name"": ""MyActivity"", + ""type"": ""Copy"", + ""typeProperties"": { + ""source"": { + ""type"": ""MongoDbAtlasSource"", + ""filter"": { + ""value"": ""@dataset().MyFilter"" + }, + ""cursorMethods"": { + ""project"": { + ""value"": ""@dataset().MyProject"", + ""type"": ""Expression"" + }, + ""sort"": ""{ age : 1 }"", + ""skip"": ""3"", + ""limit"": ""3"" + }, + ""batchSize"": ""5"" + }, + ""sink"": { + ""type"": ""CosmosDbMongoDbApiSink"", + ""writeBehavior"": ""upsert"", + ""writeBatchSize"": ""5000"" + } + }, + ""inputs"": [ + { + ""referenceName"": ""exampleSourceDataset"", + ""type"": ""DatasetReference"" + } + ], + ""outputs"": [ + { + ""referenceName"": ""exampleSinkDataset"", + ""type"": ""DatasetReference"" + } + ] + } + ], + ""parameters"": { + ""MyFilter"": { + ""type"": ""String"" + }, + ""MyProject"": { + ""type"": ""String"" + } + } +} +} "; [JsonSample] From c17d50c6f4b86f360c79a2cffe9ad8c1b5bd325e Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Mon, 28 Sep 2020 17:12:15 +0800 Subject: [PATCH 6/9] change mongoDB connection string --- .../tests/JsonSamples/LinkedServiceJsonSamples.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs index ed3f199d3e4b..0f9f191d1f3c 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -2035,7 +2035,7 @@ public class LinkedServiceJsonSamples : JsonSampleCollection Date: Wed, 14 Oct 2020 17:27:46 +0800 Subject: 
[PATCH 7/9] [DataFactory] Added publicNetworkAccess property to datafactory --- .../datafactory_resource-manager.txt | 4 ++-- .../src/CHANGELOG.md | 4 ++++ .../src/Generated/Models/Factory.cs | 13 ++++++++++- .../Generated/Models/PublicNetworkAccess.cs | 22 +++++++++++++++++++ ...rosoft.Azure.Management.DataFactory.csproj | 10 ++------- .../src/Properties/AssemblyInfo.cs | 4 ++-- 6 files changed, 44 insertions(+), 13 deletions(-) create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PublicNetworkAccess.cs diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 42ab4a81e5ad..36298a2ccb4f 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-09-27 01:55:09 UTC +2020-10-14 09:13:49 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: 7d9eaf4c9866664806a2202b02e3ab5259c86cf9 +Commit: d55d2645e973bd39f92da5433336a5a22ae096fc AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index 9a04b204b8e1..b7cbc390a20e 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog for the Azure Data Factory V2 .NET SDK +## Version 4.13.0 +### Feature Additions +- Added publicNetworkAccess 
property to datafactory + ## Version 4.12.0 ### Feature Additions - Added logLevel/enableReliableLogging to LogStorageSettings diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Factory.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Factory.cs index 9a3ef89d1f7c..e0db13ba3602 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Factory.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/Factory.cs @@ -53,7 +53,10 @@ public Factory() /// factory. /// List of parameters for /// factory. - public Factory(string id = default(string), string name = default(string), string type = default(string), string location = default(string), IDictionary tags = default(IDictionary), string eTag = default(string), IDictionary additionalProperties = default(IDictionary), FactoryIdentity identity = default(FactoryIdentity), string provisioningState = default(string), System.DateTime? createTime = default(System.DateTime?), string version = default(string), FactoryRepoConfiguration repoConfiguration = default(FactoryRepoConfiguration), IDictionary globalParameters = default(IDictionary)) + /// Whether or not public network + /// access is allowed for the data factory. Possible values include: + /// 'Enabled', 'Disabled' + public Factory(string id = default(string), string name = default(string), string type = default(string), string location = default(string), IDictionary tags = default(IDictionary), string eTag = default(string), IDictionary additionalProperties = default(IDictionary), FactoryIdentity identity = default(FactoryIdentity), string provisioningState = default(string), System.DateTime? 
createTime = default(System.DateTime?), string version = default(string), FactoryRepoConfiguration repoConfiguration = default(FactoryRepoConfiguration), IDictionary globalParameters = default(IDictionary), string publicNetworkAccess = default(string)) : base(id, name, type, location, tags, eTag) { AdditionalProperties = additionalProperties; @@ -63,6 +66,7 @@ public Factory() Version = version; RepoConfiguration = repoConfiguration; GlobalParameters = globalParameters; + PublicNetworkAccess = publicNetworkAccess; CustomInit(); } @@ -114,6 +118,13 @@ public Factory() [JsonProperty(PropertyName = "properties.globalParameters")] public IDictionary GlobalParameters { get; set; } + /// + /// Gets or sets whether or not public network access is allowed for + /// the data factory. Possible values include: 'Enabled', 'Disabled' + /// + [JsonProperty(PropertyName = "properties.publicNetworkAccess")] + public string PublicNetworkAccess { get; set; } + /// /// Validate the object. /// diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PublicNetworkAccess.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PublicNetworkAccess.cs new file mode 100644 index 000000000000..384def984483 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/PublicNetworkAccess.cs @@ -0,0 +1,22 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + + /// + /// Defines values for PublicNetworkAccess. 
+ /// + public static class PublicNetworkAccess + { + public const string Enabled = "Enabled"; + public const string Disabled = "Disabled"; + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index 99f54f60339c..72b1393e49c4 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -5,18 +5,12 @@ Microsoft.Azure.Management.DataFactory Azure Data Factory V2 is the data integration platform that goes beyond Azure Data Factory V1's orchestration and batch-processing of time-series data, with a general purpose app model supporting modern data warehousing patterns and scenarios, lift-and-shift SSIS, and data-driven SaaS applications. Compose and manage reliable and secure data integration workflows at scale. Use native ADF data connectors and Integration Runtimes to move and transform cloud and on-premises data that can be unstructured, semi-structured, and structured with Hadoop, Azure Data Lake, Spark, SQL Server, Cosmos DB and many other data platforms. 
- 4.12.0 + 4.13.0 Microsoft.Azure.Management.DataFactory Microsoft Azure resource management;Data Factory;ADF; diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs index f3a607306111..bf155b9ab073 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs @@ -6,8 +6,8 @@ [assembly: AssemblyTitle("Microsoft Azure Data Factory Management Library")] [assembly: AssemblyDescription("Provides management functionality for Microsoft Azure Data Factory Resources.")] -[assembly: AssemblyVersion("4.12.0.0")] -[assembly: AssemblyFileVersion("4.12.0.0")] +[assembly: AssemblyVersion("4.13.0.0")] +[assembly: AssemblyFileVersion("4.13.0.0")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("Microsoft")] [assembly: AssemblyProduct("Microsoft Azure .NET SDK")] From 9992021cb3dc8afcdb82912a252d20207124e5ca Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Thu, 15 Oct 2020 09:35:20 +0800 Subject: [PATCH 8/9] Update a test --- .../tests/ScenarioTests/DataFactoryScenarioTests.cs | 5 +++-- .../DataFactoryScenarioTests/DataFactoryCrud.json | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/ScenarioTests/DataFactoryScenarioTests.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/ScenarioTests/DataFactoryScenarioTests.cs index 0ea9dddc7d36..e9544976fc63 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/ScenarioTests/DataFactoryScenarioTests.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/ScenarioTests/DataFactoryScenarioTests.cs @@ -17,7 +17,7 @@ namespace DataFactory.Tests.ScenarioTests { public class DataFactoryScenarioTests : ScenarioTestBase { - public Factory 
expectedFactory = new Factory(location: FactoryLocation); + public Factory expectedFactory = new Factory(location: FactoryLocation, publicNetworkAccess: "Enabled"); [Fact] [Trait(TraitName.TestType, TestType.Scenario)] @@ -40,7 +40,8 @@ public async Task DataFactoryCrud() internal static async Task Create(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, Factory expectedFactory) { AzureOperationResponse createResponse = await client.Factories.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, dataFactoryName, expectedFactory); - ValidateFactory(createResponse.Body,dataFactoryName); + ValidateFactory(createResponse.Body, dataFactoryName); + Assert.Equal(expectedFactory.PublicNetworkAccess, createResponse.Body.PublicNetworkAccess); Assert.Equal(HttpStatusCode.OK, createResponse.Response.StatusCode); AzureOperationResponse getResponse = await client.Factories.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName); diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/SessionRecords/DataFactoryScenarioTests/DataFactoryCrud.json b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/SessionRecords/DataFactoryScenarioTests/DataFactoryCrud.json index 14d167bdddec..19590a2d8517 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/SessionRecords/DataFactoryScenarioTests/DataFactoryCrud.json +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/SessionRecords/DataFactoryScenarioTests/DataFactoryCrud.json @@ -87,7 +87,7 @@ "Microsoft.Azure.Management.DataFactory.DataFactoryManagementClient/0.8.0.0" ] }, - "ResponseBody": "{\r\n \"name\": \"sdktestingfactory9776\",\r\n \"id\": \"/subscriptions/c39dce18-cead-4065-8fb1-3af7683a5038/resourceGroups/sdktestingadfrg4712/providers/Microsoft.DataFactory/factories/sdktestingfactory9776\",\r\n \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\":
"2018-06-22T03:42:16.6466913Z\",\r\n \"version\": \"2018-06-01\",\r\n \"defaults\": {}\r\n },\r\n \"eTag\": \"\\\"0000cd05-0000-0000-0000-5b2c70180000\\\"\",\r\n \"location\": \"West US\",\r\n \"tags\": {}\r\n}", + "ResponseBody": "{\r\n \"name\": \"sdktestingfactory9776\",\r\n \"id\": \"/subscriptions/c39dce18-cead-4065-8fb1-3af7683a5038/resourceGroups/sdktestingadfrg4712/providers/Microsoft.DataFactory/factories/sdktestingfactory9776\",\r\n \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\": \"2018-06-22T03:42:16.6466913Z\",\r\n \"version\": \"2018-06-01\",\r\n \"defaults\": {},\r\n \"publicNetworkAccess\": \"Enabled\" },\r\n \"eTag\": \"\\\"0000cd05-0000-0000-0000-5b2c70180000\\\"\",\r\n \"location\": \"West US\",\r\n \"tags\": {}\r\n}", "ResponseHeaders": { "Content-Type": [ "application/json; charset=utf-8" From 65077e7accf57b66ab099a6d2535814cd1c55a24 Mon Sep 17 00:00:00 2001 From: Jingshu Pan Date: Fri, 16 Oct 2020 17:46:24 +0800 Subject: [PATCH 9/9] Add 3 change into pr --- .../datafactory_resource-manager.txt | 4 +- .../src/CHANGELOG.md | 3 + .../Generated/Models/AmazonS3LinkedService.cs | 24 ++++- .../Generated/Models/ConcurLinkedService.cs | 14 ++- .../src/Generated/Models/CopyActivity.cs | 24 ++++- .../Models/CopyActivityLogSettings.cs | 65 +++++++++++++ .../Generated/Models/LogLocationSettings.cs | 83 +++++++++++++++++ .../src/Generated/Models/LogSettings.cs | 92 +++++++++++++++++++ .../Generated/Models/LogStorageSettings.cs | 2 +- ...rosoft.Azure.Management.DataFactory.csproj | 3 + .../JsonSamples/LinkedServiceJsonSamples.cs | 48 ++++++++++ .../tests/JsonSamples/PipelineJsonSamples.cs | 79 ++++++++++++++++ 12 files changed, 431 insertions(+), 10 deletions(-) create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivityLogSettings.cs create mode 100644
sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogLocationSettings.cs create mode 100644 sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogSettings.cs diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 36298a2ccb4f..e885e62e9249 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-10-14 09:13:49 UTC +2020-10-16 09:04:28 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: d55d2645e973bd39f92da5433336a5a22ae096fc +Commit: ce71a0bd91903fe312dc5ddb9b3e520755d6dd77 AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index b7cbc390a20e..ec1d87c1af0f 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -3,6 +3,9 @@ ## Version 4.13.0 ### Feature Additions - Added publicNetworkAccess property to datafactory +- Added logSettings property into CopyActivity +- Added connectionProperties property into Concur linkedService +- Added authenticationType and sessionToken properties into AmazonS3 linkedService ## Version 4.12.0 ### Feature Additions diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AmazonS3LinkedService.cs 
b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AmazonS3LinkedService.cs index e81f16e28efe..aa98907ffe46 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AmazonS3LinkedService.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AmazonS3LinkedService.cs @@ -42,6 +42,9 @@ public AmazonS3LinkedService() /// Parameters for linked service. /// List of tags that can be used for /// describing the linked service. + /// The authentication type of S3. + /// Allowed value: AccessKey (default) or TemporarySecurityCredentials. + /// Type: string (or Expression with resultType string). /// The access key identifier of the Amazon /// S3 Identity and Access Management (IAM) user. Type: string (or /// Expression with resultType string). @@ -52,16 +55,20 @@ public AmazonS3LinkedService() /// it only if you want to try a different service endpoint or want to /// switch between https and http. Type: string (or Expression with /// resultType string). + /// The session token for the S3 temporary + /// security credential. /// The encrypted credential used for /// authentication. Credentials are encrypted using the integration /// runtime credential manager. Type: string (or Expression with /// resultType string). 
- public AmazonS3LinkedService(IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object accessKeyId = default(object), SecretBase secretAccessKey = default(SecretBase), object serviceUrl = default(object), object encryptedCredential = default(object)) + public AmazonS3LinkedService(IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object authenticationType = default(object), object accessKeyId = default(object), SecretBase secretAccessKey = default(SecretBase), object serviceUrl = default(object), SecretBase sessionToken = default(SecretBase), object encryptedCredential = default(object)) : base(additionalProperties, connectVia, description, parameters, annotations) { + AuthenticationType = authenticationType; AccessKeyId = accessKeyId; SecretAccessKey = secretAccessKey; ServiceUrl = serviceUrl; + SessionToken = sessionToken; EncryptedCredential = encryptedCredential; CustomInit(); } @@ -71,6 +78,14 @@ public AmazonS3LinkedService() /// partial void CustomInit(); + /// + /// Gets or sets the authentication type of S3. Allowed value: + /// AccessKey (default) or TemporarySecurityCredentials. Type: string + /// (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.authenticationType")] + public object AuthenticationType { get; set; } + /// /// Gets or sets the access key identifier of the Amazon S3 Identity /// and Access Management (IAM) user. 
Type: string (or Expression with @@ -96,6 +111,13 @@ public AmazonS3LinkedService() [JsonProperty(PropertyName = "typeProperties.serviceUrl")] public object ServiceUrl { get; set; } + /// + /// Gets or sets the session token for the S3 temporary security + /// credential. + /// + [JsonProperty(PropertyName = "typeProperties.sessionToken")] + public SecretBase SessionToken { get; set; } + /// /// Gets or sets the encrypted credential used for authentication. /// Credentials are encrypted using the integration runtime credential diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ConcurLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ConcurLinkedService.cs index ad5cd52c9051..f8f169009ba4 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ConcurLinkedService.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/ConcurLinkedService.cs @@ -46,6 +46,9 @@ public ConcurLinkedService() /// Parameters for linked service. /// List of tags that can be used for /// describing the linked service. + /// Properties used to connect to + /// Concur. It is mutually exclusive with any other properties in the + /// linked service. Type: object. /// The password corresponding to the user name /// that you provided in the username field. /// Specifies whether the data @@ -61,9 +64,10 @@ public ConcurLinkedService() /// authentication. Credentials are encrypted using the integration /// runtime credential manager. Type: string (or Expression with /// resultType string). 
- public ConcurLinkedService(object clientId, object username, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), SecretBase password = default(SecretBase), object useEncryptedEndpoints = default(object), object useHostVerification = default(object), object usePeerVerification = default(object), object encryptedCredential = default(object)) + public ConcurLinkedService(object clientId, object username, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object connectionProperties = default(object), SecretBase password = default(SecretBase), object useEncryptedEndpoints = default(object), object useHostVerification = default(object), object usePeerVerification = default(object), object encryptedCredential = default(object)) : base(additionalProperties, connectVia, description, parameters, annotations) { + ConnectionProperties = connectionProperties; ClientId = clientId; Username = username; Password = password; @@ -79,6 +83,14 @@ public ConcurLinkedService() /// partial void CustomInit(); + /// + /// Gets or sets properties used to connect to Concur. It is mutually + /// exclusive with any other properties in the linked service. Type: + /// object. + /// + [JsonProperty(PropertyName = "typeProperties.connectionProperties")] + public object ConnectionProperties { get; set; } + /// /// Gets or sets application client_id supplied by Concur App /// Management. 
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivity.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivity.cs index f65561ed3f87..e8ad9ea11e78 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivity.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivity.cs @@ -65,8 +65,11 @@ public CopyActivity() /// Expression with resultType boolean). /// Redirect incompatible /// row settings when EnableSkipIncompatibleRow is true. - /// Log storage settings customer need - /// to provide when enabling session log. + /// (Deprecated. Please use + /// LogSettings) Log storage settings customer need to provide when + /// enabling session log. + /// Log settings customer needs provide when + /// enabling log. /// Preserve Rules. /// Preserve rules. /// Whether to enable Data @@ -76,7 +79,7 @@ public CopyActivity() /// consistency. /// List of inputs for the activity. /// List of outputs for the activity. 
- public CopyActivity(string name, CopySource source, CopySink sink, IDictionary additionalProperties = default(IDictionary), string description = default(string), IList dependsOn = default(IList), IList userProperties = default(IList), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), object translator = default(object), object enableStaging = default(object), StagingSettings stagingSettings = default(StagingSettings), object parallelCopies = default(object), object dataIntegrationUnits = default(object), object enableSkipIncompatibleRow = default(object), RedirectIncompatibleRowSettings redirectIncompatibleRowSettings = default(RedirectIncompatibleRowSettings), LogStorageSettings logStorageSettings = default(LogStorageSettings), IList preserveRules = default(IList), IList preserve = default(IList), object validateDataConsistency = default(object), SkipErrorFile skipErrorFile = default(SkipErrorFile), IList inputs = default(IList), IList outputs = default(IList)) + public CopyActivity(string name, CopySource source, CopySink sink, IDictionary additionalProperties = default(IDictionary), string description = default(string), IList dependsOn = default(IList), IList userProperties = default(IList), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), object translator = default(object), object enableStaging = default(object), StagingSettings stagingSettings = default(StagingSettings), object parallelCopies = default(object), object dataIntegrationUnits = default(object), object enableSkipIncompatibleRow = default(object), RedirectIncompatibleRowSettings redirectIncompatibleRowSettings = default(RedirectIncompatibleRowSettings), LogStorageSettings logStorageSettings = default(LogStorageSettings), LogSettings logSettings = default(LogSettings), IList preserveRules = default(IList), IList preserve = default(IList), object 
validateDataConsistency = default(object), SkipErrorFile skipErrorFile = default(SkipErrorFile), IList inputs = default(IList), IList outputs = default(IList)) : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy) { Source = source; @@ -89,6 +92,7 @@ public CopyActivity() EnableSkipIncompatibleRow = enableSkipIncompatibleRow; RedirectIncompatibleRowSettings = redirectIncompatibleRowSettings; LogStorageSettings = logStorageSettings; + LogSettings = logSettings; PreserveRules = preserveRules; Preserve = preserve; ValidateDataConsistency = validateDataConsistency; @@ -168,12 +172,18 @@ public CopyActivity() public RedirectIncompatibleRowSettings RedirectIncompatibleRowSettings { get; set; } /// - /// Gets or sets log storage settings customer need to provide when - /// enabling session log. + /// Gets or sets (Deprecated. Please use LogSettings) Log storage + /// settings customer need to provide when enabling session log. /// [JsonProperty(PropertyName = "typeProperties.logStorageSettings")] public LogStorageSettings LogStorageSettings { get; set; } + /// + /// Gets or sets log settings customer needs provide when enabling log. + /// + [JsonProperty(PropertyName = "typeProperties.logSettings")] + public LogSettings LogSettings { get; set; } + /// /// Gets or sets preserve Rules. 
/// @@ -240,6 +250,10 @@ public override void Validate() { LogStorageSettings.Validate(); } + if (LogSettings != null) + { + LogSettings.Validate(); + } if (Inputs != null) { foreach (var element in Inputs) diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivityLogSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivityLogSettings.cs new file mode 100644 index 000000000000..39faed8f717a --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/CopyActivityLogSettings.cs @@ -0,0 +1,65 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Linq; + + /// + /// Settings for copy activity log. + /// + public partial class CopyActivityLogSettings + { + /// + /// Initializes a new instance of the CopyActivityLogSettings class. + /// + public CopyActivityLogSettings() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the CopyActivityLogSettings class. + /// + /// Gets or sets the log level, support: Info, + /// Warning. Type: string (or Expression with resultType + /// string). + /// Specifies whether to enable + /// reliable logging. Type: boolean (or Expression with resultType + /// boolean). 
+ public CopyActivityLogSettings(object logLevel = default(object), object enableReliableLogging = default(object)) + { + LogLevel = logLevel; + EnableReliableLogging = enableReliableLogging; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the log level, support: Info, Warning. Type: string + /// (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "logLevel")] + public object LogLevel { get; set; } + + /// + /// Gets or sets specifies whether to enable reliable logging. Type: + /// boolean (or Expression with resultType boolean). + /// + [JsonProperty(PropertyName = "enableReliableLogging")] + public object EnableReliableLogging { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogLocationSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogLocationSettings.cs new file mode 100644 index 000000000000..e8af925646c8 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogLocationSettings.cs @@ -0,0 +1,83 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Newtonsoft.Json; + using System.Linq; + + /// + /// Log location settings. + /// + public partial class LogLocationSettings + { + /// + /// Initializes a new instance of the LogLocationSettings class. 
+ /// + public LogLocationSettings() + { + LinkedServiceName = new LinkedServiceReference(); + CustomInit(); + } + + /// + /// Initializes a new instance of the LogLocationSettings class. + /// + /// Log storage linked service + /// reference. + /// The path to storage for storing detailed logs of + /// activity execution. Type: string (or Expression with resultType + /// string). + public LogLocationSettings(LinkedServiceReference linkedServiceName, object path = default(object)) + { + LinkedServiceName = linkedServiceName; + Path = path; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets log storage linked service reference. + /// + [JsonProperty(PropertyName = "linkedServiceName")] + public LinkedServiceReference LinkedServiceName { get; set; } + + /// + /// Gets or sets the path to storage for storing detailed logs of + /// activity execution. Type: string (or Expression with resultType + /// string). + /// + [JsonProperty(PropertyName = "path")] + public object Path { get; set; } + + /// + /// Validate the object. + /// + /// + /// Thrown if validation fails + /// + public virtual void Validate() + { + if (LinkedServiceName == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "LinkedServiceName"); + } + if (LinkedServiceName != null) + { + LinkedServiceName.Validate(); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogSettings.cs new file mode 100644 index 000000000000..be026f31fb96 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogSettings.cs @@ -0,0 +1,92 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Newtonsoft.Json; + using System.Linq; + + /// + /// Log settings. + /// + public partial class LogSettings + { + /// + /// Initializes a new instance of the LogSettings class. + /// + public LogSettings() + { + LogLocationSettings = new LogLocationSettings(); + CustomInit(); + } + + /// + /// Initializes a new instance of the LogSettings class. + /// + /// Log location settings customer + /// needs to provide when enabling log. + /// Specifies whether to enable + /// copy activity log. Type: boolean (or Expression with resultType + /// boolean). + /// Specifies settings for copy + /// activity log. + public LogSettings(LogLocationSettings logLocationSettings, object enableCopyActivityLog = default(object), CopyActivityLogSettings copyActivityLogSettings = default(CopyActivityLogSettings)) + { + EnableCopyActivityLog = enableCopyActivityLog; + CopyActivityLogSettings = copyActivityLogSettings; + LogLocationSettings = logLocationSettings; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets specifies whether to enable copy activity log. Type: + /// boolean (or Expression with resultType boolean). + /// + [JsonProperty(PropertyName = "enableCopyActivityLog")] + public object EnableCopyActivityLog { get; set; } + + /// + /// Gets or sets specifies settings for copy activity log. + /// + [JsonProperty(PropertyName = "copyActivityLogSettings")] + public CopyActivityLogSettings CopyActivityLogSettings { get; set; } + + /// + /// Gets or sets log location settings customer needs to provide when + /// enabling log. 
+ /// + [JsonProperty(PropertyName = "logLocationSettings")] + public LogLocationSettings LogLocationSettings { get; set; } + + /// + /// Validate the object. + /// + /// + /// Thrown if validation fails + /// + public virtual void Validate() + { + if (LogLocationSettings == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "LogLocationSettings"); + } + if (LogLocationSettings != null) + { + LogLocationSettings.Validate(); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogStorageSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogStorageSettings.cs index 777cc15863a8..92ae1462a169 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogStorageSettings.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/LogStorageSettings.cs @@ -17,7 +17,7 @@ namespace Microsoft.Azure.Management.DataFactory.Models using System.Linq; /// - /// Log storage settings. + /// (Deprecated. Please use LogSettings) Log storage settings. 
/// public partial class LogStorageSettings { diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index 72b1393e49c4..a330e93da586 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -11,6 +11,9 @@ diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs index 0f9f191d1f3c..ae70c6421df8 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -1384,6 +1384,30 @@ public class LinkedServiceJsonSamples : JsonSampleCollection } }"; + [JsonSample] + public const string CopyActivity_DelimitedText_AdlsWithlogSettings = @"{ + ""name"": ""MyPipeline"", + ""properties"": { + ""activities"": [ + { + ""type"": ""Copy"", + ""typeProperties"": { + ""source"": { + ""type"": ""DelimitedTextSource"", + ""storeSettings"": { + ""type"": ""AzureDataLakeStoreReadSettings"", + ""recursive"": true, + ""enablePartitionDiscovery"": true + }, + ""formatSettings"": { + ""type"": ""DelimitedTextReadSettings"", + ""skipLineCount"": 10, + ""additionalNullValues"": [ ""\\N"", ""NULL"" ] + }, + ""additionalColumns"": [ + { + ""name"": ""clmn"", + ""value"": ""$$FILEPATH"" + } + ] + }, + ""sink"": { + ""type"": ""DelimitedTextSink"", + ""storeSettings"": { + ""type"": ""AzureDataLakeStoreWriteSettings"", + ""maxConcurrentConnections"": 3, + ""copyBehavior"": ""PreserveHierarchy"" + }, + ""formatSettings"": { + ""type"": 
""DelimitedTextWriteSettings"", + ""quoteAllText"": true, + ""fileExtension"": "".csv"", + ""maxRowsPerFile"": 10, + ""fileNamePrefix"": ""orcSinkFile"" + } + }, + ""validateDataConsistency"": true, + ""skipErrorFile"": { + ""fileMissing"": true, + ""dataInconsistency"": true + }, + ""logSettings"": { + ""enableCopyActivityLog"": true, + ""copyActivityLogSettings"": { + ""logLevel"": ""Info"", + ""enableReliableLogging"": true + }, + ""logLocationSettings"": { + ""linkedServiceName"": { + ""referenceName"": ""exampleLinkedService"", + ""type"": ""LinkedServiceReference"" + }, + ""path"": ""test"" + } + } + }, + ""inputs"": [ + { + ""referenceName"": ""exampleDataset"", + ""type"": ""DatasetReference"" + } + ], + ""outputs"": [ + { + ""referenceName"": ""exampleDataset"", + ""type"": ""DatasetReference"" + } + ], + } + ] + } +}"; + [JsonSample] public const string CopyActivity_DelimitedText_AzureBlob = @"{ ""properties"": {