diff --git a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt index 428bcf18b1db..4f2736227720 100644 --- a/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt +++ b/eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt @@ -4,11 +4,11 @@ Commencing code generation Generating CSharp code Executing AutoRest command cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk -2020-07-16 04:32:25 UTC +2020-07-24 04:49:02 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: 5268047891427110a671893d1eb076e10d463802 +Commit: 9d3e3fa953bd61b5e62101d0fc3e036461916105 AutoRest information Requested version: v2 Bootstrapper version: autorest@2.0.4413 diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md index 5ea13147f0b6..8c77a27e2c2f 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md @@ -5,6 +5,10 @@ - Added connectionProperties property to QuickBooks, Square, Xero, Zoho, SalesforceMarketingCloud linked service - Added support of Message server connecting from SAP Open Hub - Enable Sql Sources (AzureSqlDatabase, SqlSever, Synapse, SqlMI) read in parallel, support partition by dynamic range and sql built-in partitions. 
+- Added Rest as Sink Type in Copy Sink +- Added NewClusterLogDestination property to Azure Databricks linked service +- Support expression (object) type for data flow staging folder path +- Added new type "AzPowerShellSetup" to parameter "ExpressCustomSetup" for "AzureRmDataFactoryV2IntegrationRuntime" cmd to install Azure PowerShell ## Version 4.9.0 ### Feature Additions diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzPowerShellSetup.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzPowerShellSetup.cs new file mode 100644 index 000000000000..026165f6ba8b --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzPowerShellSetup.cs @@ -0,0 +1,68 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Microsoft.Rest; + using Microsoft.Rest.Serialization; + using Newtonsoft.Json; + using System.Linq; + + /// + /// The express custom setup of installing Azure PowerShell. + /// + [Rest.Serialization.JsonTransformation] + public partial class AzPowerShellSetup : CustomSetupBase + { + /// + /// Initializes a new instance of the AzPowerShellSetup class. + /// + public AzPowerShellSetup() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the AzPowerShellSetup class. + /// + /// The required version of Azure PowerShell to + /// install. 
+ public AzPowerShellSetup(string version) + { + Version = version; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the required version of Azure PowerShell to install. + /// + [JsonProperty(PropertyName = "typeProperties.version")] + public string Version { get; set; } + + /// + /// Validate the object. + /// + /// + /// Thrown if validation fails + /// + public virtual void Validate() + { + if (Version == null) + { + throw new ValidationException(ValidationRules.CannotBeNull, "Version"); + } + } + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksLinkedService.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksLinkedService.cs index 1ac3e9ad74f8..76f63b301128 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksLinkedService.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureDatabricksLinkedService.cs @@ -83,6 +83,9 @@ public AzureDatabricksLinkedService() /// Additional tags for cluster /// resources. This property is ignored in instance pool /// configurations. + /// Specify a location to + /// deliver Spark driver, worker, and event logs. Type: string (or + /// Expression with resultType string). /// The driver node type for the /// new job cluster. This property is ignored in instance pool /// configurations. Type: string (or Expression with resultType @@ -99,7 +102,7 @@ public AzureDatabricksLinkedService() /// authentication. Credentials are encrypted using the integration /// runtime credential manager. Type: string (or Expression with /// resultType string). 
- public AzureDatabricksLinkedService(object domain, SecretBase accessToken, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object existingClusterId = default(object), object instancePoolId = default(object), object newClusterVersion = default(object), object newClusterNumOfWorker = default(object), object newClusterNodeType = default(object), IDictionary newClusterSparkConf = default(IDictionary), IDictionary newClusterSparkEnvVars = default(IDictionary), IDictionary newClusterCustomTags = default(IDictionary), object newClusterDriverNodeType = default(object), object newClusterInitScripts = default(object), object newClusterEnableElasticDisk = default(object), object encryptedCredential = default(object)) + public AzureDatabricksLinkedService(object domain, SecretBase accessToken, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object existingClusterId = default(object), object instancePoolId = default(object), object newClusterVersion = default(object), object newClusterNumOfWorker = default(object), object newClusterNodeType = default(object), IDictionary newClusterSparkConf = default(IDictionary), IDictionary newClusterSparkEnvVars = default(IDictionary), IDictionary newClusterCustomTags = default(IDictionary), object newClusterLogDestination = default(object), object newClusterDriverNodeType = default(object), object newClusterInitScripts = default(object), object newClusterEnableElasticDisk = default(object), object encryptedCredential = default(object)) : base(additionalProperties, connectVia, description, parameters, annotations) { Domain = 
domain; @@ -112,6 +115,7 @@ public AzureDatabricksLinkedService() NewClusterSparkConf = newClusterSparkConf; NewClusterSparkEnvVars = newClusterSparkEnvVars; NewClusterCustomTags = newClusterCustomTags; + NewClusterLogDestination = newClusterLogDestination; NewClusterDriverNodeType = newClusterDriverNodeType; NewClusterInitScripts = newClusterInitScripts; NewClusterEnableElasticDisk = newClusterEnableElasticDisk; @@ -209,6 +213,14 @@ public AzureDatabricksLinkedService() [JsonProperty(PropertyName = "typeProperties.newClusterCustomTags")] public IDictionary NewClusterCustomTags { get; set; } + /// + /// Gets or sets specify a location to deliver Spark driver, worker, + /// and event logs. Type: string (or Expression with resultType + /// string). + /// + [JsonProperty(PropertyName = "typeProperties.newClusterLogDestination")] + public object NewClusterLogDestination { get; set; } + /// /// Gets or sets the driver node type for the new job cluster. This /// property is ignored in instance pool configurations. Type: string diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowStagingInfo.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowStagingInfo.cs index b77877ea33a0..67e7c7e5fd5d 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowStagingInfo.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowStagingInfo.cs @@ -31,8 +31,9 @@ public DataFlowStagingInfo() /// /// Staging linked service /// reference. - /// Folder path for staging blob. - public DataFlowStagingInfo(LinkedServiceReference linkedService = default(LinkedServiceReference), string folderPath = default(string)) + /// Folder path for staging blob. 
Type: string + /// (or Expression with resultType string) + public DataFlowStagingInfo(LinkedServiceReference linkedService = default(LinkedServiceReference), object folderPath = default(object)) { LinkedService = linkedService; FolderPath = folderPath; @@ -51,10 +52,11 @@ public DataFlowStagingInfo() public LinkedServiceReference LinkedService { get; set; } /// - /// Gets or sets folder path for staging blob. + /// Gets or sets folder path for staging blob. Type: string (or + /// Expression with resultType string) /// [JsonProperty(PropertyName = "folderPath")] - public string FolderPath { get; set; } + public object FolderPath { get; set; } /// /// Validate the object. diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs new file mode 100644 index 000000000000..eb28aa93e384 --- /dev/null +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/RestSink.cs @@ -0,0 +1,135 @@ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for +// license information. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is +// regenerated. +// + +namespace Microsoft.Azure.Management.DataFactory.Models +{ + using Newtonsoft.Json; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + + /// + /// A copy activity Rest service Sink. + /// + public partial class RestSink : CopySink + { + /// + /// Initializes a new instance of the RestSink class. + /// + public RestSink() + { + CustomInit(); + } + + /// + /// Initializes a new instance of the RestSink class. + /// + /// Unmatched properties from the + /// message are deserialized this collection + /// Write batch size. 
Type: integer (or + /// Expression with resultType integer), minimum: 0. + /// Write batch timeout. Type: string + /// (or Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// Sink retry count. Type: integer (or + /// Expression with resultType integer). + /// Sink retry wait. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent + /// connection count for the sink data store. Type: integer (or + /// Expression with resultType integer). + /// The HTTP method used to call the + /// RESTful API. The default is POST. Type: string (or Expression with + /// resultType string). + /// The additional HTTP headers in the + /// request to the RESTful API. Type: string (or Expression with + /// resultType string). + /// The timeout (TimeSpan) to get an + /// HTTP response. It is the timeout to get a response, not the timeout + /// to read response data. Default value: 00:01:40. Type: string (or + /// Expression with resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The time to await before sending next + /// request, in milliseconds + /// Compression Type to Send data in + /// compressed format with Optimal Compression Level, Default is None. + /// And The Only Supported option is Gzip. + /// Wraps Request Array Json + /// into an Object before calling the rest endpoint , Default is false. 
+ /// ex: if true request content sample format is { rows:[]} else the + /// format is [] + public RestSink(IDictionary additionalProperties = default(IDictionary), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object requestMethod = default(object), object additionalHeaders = default(object), object httpRequestTimeout = default(object), object requestInterval = default(object), object compressionType = default(object), object wrapRequestJsonInAnObject = default(object)) + : base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections) + { + RequestMethod = requestMethod; + AdditionalHeaders = additionalHeaders; + HttpRequestTimeout = httpRequestTimeout; + RequestInterval = requestInterval; + CompressionType = compressionType; + WrapRequestJsonInAnObject = wrapRequestJsonInAnObject; + CustomInit(); + } + + /// + /// An initialization method that performs custom operations like setting defaults + /// + partial void CustomInit(); + + /// + /// Gets or sets the HTTP method used to call the RESTful API. The + /// default is POST. Type: string (or Expression with resultType + /// string). + /// + [JsonProperty(PropertyName = "requestMethod")] + public object RequestMethod { get; set; } + + /// + /// Gets or sets the additional HTTP headers in the request to the + /// RESTful API. Type: string (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "additionalHeaders")] + public object AdditionalHeaders { get; set; } + + /// + /// Gets or sets the timeout (TimeSpan) to get an HTTP response. It is + /// the timeout to get a response, not the timeout to read response + /// data. Default value: 00:01:40. Type: string (or Expression with + /// resultType string), pattern: + /// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ /// + [JsonProperty(PropertyName = "httpRequestTimeout")] + public object HttpRequestTimeout { get; set; } + + /// + /// Gets or sets the time to await before sending next request, in + /// milliseconds + /// + [JsonProperty(PropertyName = "requestInterval")] + public object RequestInterval { get; set; } + + /// + /// Gets or sets compression Type to Send data in compressed format + /// with Optimal Compression Level, Default is None. And The Only + /// Supported option is Gzip. + /// + [JsonProperty(PropertyName = "compressionType")] + public object CompressionType { get; set; } + + /// + /// Gets or sets wraps Request Array Json into an Object before calling + /// the rest endpoint , Default is false. ex: if true request content + /// sample format is { rows:[]} else the format is [] + /// + [JsonProperty(PropertyName = "wrapRequestJsonInAnObject")] + public object WrapRequestJsonInAnObject { get; set; } + + } +} diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj index ef092f2877cb..cc74fa17f876 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj @@ -13,6 +13,10 @@ - Added connectionProperties property to QuickBooks, Square, Xero, Zoho, SalesforceMarketingCloud linked service - Added support of Message server connecting from SAP Open Hub - Enable Sql Sources (AzureSqlDatabase, SqlSever, Synapse, SqlMI) read in parallel, support partition by dynamic range and sql built-in partitions. 
+- Added Rest as Sink Type in Copy Sink +- Added NewClusterLogDestination property to Azure Databricks linked service +- Support expression (object) type for data flow staging folder path +- Add new type "AzPowerShellSetup" to parameter "ExpressCustomSetup" for "AzureRmDataFactoryV2IntegrationRuntime" cmd to install Azure PowerShell ]]> diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs index 6742761b2144..2e907ad4986c 100644 --- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -2473,6 +2473,7 @@ public class LinkedServiceJsonSamples : JsonSampleCollection ] } } +"; + + [JsonSample(version: "Copy")] + public const string CopySqlToRest = @" +{ + name: ""MyPipelineName"", + properties: + { + description : ""Copy from SQL to Rest"", + activities: + [ + { + type: ""Copy"", + name: ""TestActivity"", + description: ""Test activity description"", + typeProperties: + { + source: + { + type: ""SqlSource"", + sourceRetryCount: 2, + sourceRetryWait: ""00:00:01"", + sqlReaderQuery: ""$EncryptedString$MyEncryptedQuery"", + sqlReaderStoredProcedureName: ""CopyTestSrcStoredProcedureWithParameters"", + storedProcedureParameters: { + ""stringData"": { value: ""test"", type: ""String""}, + ""id"": { value: ""3"", type: ""Int""} + }, + isolationLevel: ""ReadCommitted"" + }, + sink: + { + type: ""RestSink"", + requestMethod: ""POST"", + requestInterval: ""00:01:40"", + httpRequestTimeout: ""00:01:40"", + additionalHeaders:{ + ""Key"":""Value"" + }, + writeBatchSize: 1000, + writeBatchTimeout: ""01:00:00"", + compressionType: ""gzip"", + wrapRequestJsonInAnObject: true, + }, + translator: + { + type: ""TabularTranslator"", + columnMappings: 
""PartitionKey:PartitionKey"" + } + }, + inputs: + [ + { + referenceName: ""InputSqlDA"", type: ""DatasetReference"" + } + ], + outputs: + [ + { + referenceName: ""OutputRestDA"", type: ""DatasetReference"" + } + ], + linkedServiceName: { referenceName: ""MyLinkedServiceName"", type: ""LinkedServiceReference"" }, + policy: + { + retry: 3, + timeout: ""00:00:05"", + } + } + ] + } +} "; } }