diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
index 8c77a27e2c2f..50e876a11ae4 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog for the Azure Data Factory V2 .NET SDK
+## Version 4.11.0
+### Feature Additions
+- Added support for Trigger Run Cancel API
+- Added sapDataColumnDelimiter for SAP Table/SAP Open Hub sources
+- Added azureCloudType property for AAD authentication
+- Added snapshot property to the Azure File Storage linked service
+- Added source/sink level staging support in data flow
+
## Version 4.10.0
### Feature Additions
- Added connectionProperties property to QuickBooks, Square, Xero, Zoho, SalesforceMarketingCloud linked service
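The Trigger Run Cancel API called out in the changelog is not otherwise visible in this excerpt (the generated operations classes are not part of the diff). A minimal sketch of the call, assuming it surfaces as a `Cancel` method on the `TriggerRuns` operation group with the usual resource-group/factory/trigger/run-id parameters; all names below are placeholders:

```csharp
using Microsoft.Azure.Management.DataFactory;

public static class TriggerRunCancelSample
{
    // Cancels a single trigger run. The method name and parameter order are
    // assumptions based on the Trigger Run Cancel REST operation; they are
    // not shown in this diff.
    public static void CancelRun(IDataFactoryManagementClient client)
    {
        client.TriggerRuns.Cancel(
            "exampleResourceGroup",                   // resource group (placeholder)
            "exampleFactoryName",                     // data factory (placeholder)
            "exampleTrigger",                         // trigger name (placeholder)
            "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"); // trigger run id (placeholder)
    }
}
```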
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs
new file mode 100644
index 000000000000..10849e9db258
--- /dev/null
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs
@@ -0,0 +1,54 @@
+// <auto-generated>
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for
+// license information.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is
+// regenerated.
+// </auto-generated>
+
+namespace Microsoft.Azure.Management.DataFactory.Models
+{
+    using System.Collections;
+    using System.Collections.Generic;
+    using System.Linq;
+
+    /// <summary>
+    /// Azure File Storage write settings.
+    /// </summary>
+    public partial class AzureFileStorageWriteSettings : StoreWriteSettings
+    {
+        /// <summary>
+        /// Initializes a new instance of the AzureFileStorageWriteSettings
+        /// class.
+        /// </summary>
+        public AzureFileStorageWriteSettings()
+        {
+            CustomInit();
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the AzureFileStorageWriteSettings
+        /// class.
+        /// </summary>
+        /// <param name="additionalProperties">Unmatched properties from the
+        /// message are deserialized this collection</param>
+        /// <param name="maxConcurrentConnections">The maximum concurrent
+        /// connection count for the source data store. Type: integer (or
+        /// Expression with resultType integer).</param>
+        /// <param name="copyBehavior">The type of copy behavior for copy
+        /// sink.</param>
+        public AzureFileStorageWriteSettings(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object maxConcurrentConnections = default(object), object copyBehavior = default(object))
+            : base(additionalProperties, maxConcurrentConnections, copyBehavior)
+        {
+            CustomInit();
+        }
+
+        /// <summary>
+        /// An initialization method that performs custom operations like setting defaults
+        /// </summary>
+        partial void CustomInit();
+
+    }
+}
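`AzureFileStorageWriteSettings` slots into the existing `StoreWriteSettings` hierarchy. A minimal sketch of attaching it to a copy sink, assuming the usual pattern where a sink such as `BinarySink` carries the store settings; the activity name and copy-behavior value are illustrative only:

```csharp
using Microsoft.Azure.Management.DataFactory.Models;

public static class AzureFileStorageWriteSettingsSample
{
    public static CopyActivity BuildCopyToAzureFileStorage()
    {
        return new CopyActivity
        {
            Name = "CopyToAzureFileStorage",
            Source = new BinarySource(),
            Sink = new BinarySink
            {
                // New in 4.11.0: Azure File Storage specific write settings.
                StoreSettings = new AzureFileStorageWriteSettings
                {
                    MaxConcurrentConnections = 4,
                    CopyBehavior = "PreserveHierarchy" // example value
                }
            }
        };
    }
}
```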
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSink.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSink.cs
index 7fde44a13732..5e73cd8cb403 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSink.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSink.cs
@@ -35,12 +35,15 @@ public DataFlowSink()
         /// <param name="linkedService">Linked service reference.</param>
         /// <param name="schemaLinkedService">Schema linked service
         /// reference.</param>
-        public DataFlowSink(string name, string description = default(string), DatasetReference dataset = default(DatasetReference), LinkedServiceReference linkedService = default(LinkedServiceReference), LinkedServiceReference schemaLinkedService = default(LinkedServiceReference))
+        /// <param name="staging">Staging info for execute data flow
+        /// activity.</param>
+        public DataFlowSink(string name, string description = default(string), DatasetReference dataset = default(DatasetReference), LinkedServiceReference linkedService = default(LinkedServiceReference), LinkedServiceReference schemaLinkedService = default(LinkedServiceReference), DataFlowStagingInfo staging = default(DataFlowStagingInfo))
: base(name, description)
{
Dataset = dataset;
LinkedService = linkedService;
SchemaLinkedService = schemaLinkedService;
+ Staging = staging;
CustomInit();
}
@@ -67,6 +70,12 @@ public DataFlowSink()
[JsonProperty(PropertyName = "schemaLinkedService")]
public LinkedServiceReference SchemaLinkedService { get; set; }

+        /// <summary>
+        /// Gets or sets staging info for execute data flow activity.
+        /// </summary>
+        [JsonProperty(PropertyName = "staging")]
+        public DataFlowStagingInfo Staging { get; set; }
+
         /// <summary>
         /// Validate the object.
         /// </summary>
@@ -88,6 +97,10 @@ public override void Validate()
{
SchemaLinkedService.Validate();
}
+ if (Staging != null)
+ {
+ Staging.Validate();
+ }
}
}
}
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSource.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSource.cs
index 838e69cc4a25..cd976967cba1 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSource.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/DataFlowSource.cs
@@ -35,12 +35,15 @@ public DataFlowSource()
         /// <param name="linkedService">Linked service reference.</param>
         /// <param name="schemaLinkedService">Schema linked service
         /// reference.</param>
-        public DataFlowSource(string name, string description = default(string), DatasetReference dataset = default(DatasetReference), LinkedServiceReference linkedService = default(LinkedServiceReference), LinkedServiceReference schemaLinkedService = default(LinkedServiceReference))
+        /// <param name="staging">Staging info for execute data flow
+        /// activity.</param>
+        public DataFlowSource(string name, string description = default(string), DatasetReference dataset = default(DatasetReference), LinkedServiceReference linkedService = default(LinkedServiceReference), LinkedServiceReference schemaLinkedService = default(LinkedServiceReference), DataFlowStagingInfo staging = default(DataFlowStagingInfo))
: base(name, description)
{
Dataset = dataset;
LinkedService = linkedService;
SchemaLinkedService = schemaLinkedService;
+ Staging = staging;
CustomInit();
}
@@ -67,6 +70,12 @@ public DataFlowSource()
[JsonProperty(PropertyName = "schemaLinkedService")]
public LinkedServiceReference SchemaLinkedService { get; set; }

+        /// <summary>
+        /// Gets or sets staging info for execute data flow activity.
+        /// </summary>
+        [JsonProperty(PropertyName = "staging")]
+        public DataFlowStagingInfo Staging { get; set; }
+
         /// <summary>
         /// Validate the object.
         /// </summary>
@@ -88,6 +97,10 @@ public override void Validate()
{
SchemaLinkedService.Validate();
}
+ if (Staging != null)
+ {
+ Staging.Validate();
+ }
}
}
}
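`DataFlowSource` receives the same `Staging` property as `DataFlowSink` above. A minimal sketch of populating both, mirroring the MappingDataFlowWithSourceSinkStaging JSON sample added later in this diff; the `DataFlowStagingInfo` members (`LinkedService`, `FolderPath`) are inferred from that sample and all reference names are placeholders:

```csharp
using Microsoft.Azure.Management.DataFactory.Models;

public static class DataFlowStagingSample
{
    public static MappingDataFlow BuildStagedDataFlow()
    {
        return new MappingDataFlow
        {
            Sources = new[]
            {
                new DataFlowSource("USDCurrency")
                {
                    Dataset = new DatasetReference("CurrencyDatasetUSD"),
                    // New in 4.11.0: per-source staging location.
                    Staging = new DataFlowStagingInfo
                    {
                        LinkedService = new LinkedServiceReference("blob_store_sasToken01"),
                        FolderPath = "testcontainer01"
                    }
                }
            },
            Sinks = new[]
            {
                new DataFlowSink("USDSink")
                {
                    Dataset = new DatasetReference("USDOutput"),
                    // New in 4.11.0: per-sink staging location.
                    Staging = new DataFlowStagingInfo
                    {
                        LinkedService = new LinkedServiceReference("blob_store_sasToken02"),
                        FolderPath = "testcontainer02"
                    }
                }
            },
            Script = "some script"
        };
    }
}
```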
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
index cc74fa17f876..28cfe7cef4c7 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Microsoft.Azure.Management.DataFactory.csproj
@@ -5,18 +5,16 @@
     <PackageId>Microsoft.Azure.Management.DataFactory</PackageId>
     <Description>Azure Data Factory V2 is the data integration platform that goes beyond Azure Data Factory V1's orchestration and batch-processing of time-series data, with a general purpose app model supporting modern data warehousing patterns and scenarios, lift-and-shift SSIS, and data-driven SaaS applications. Compose and manage reliable and secure data integration workflows at scale. Use native ADF data connectors and Integration Runtimes to move and transform cloud and on-premises data that can be unstructured, semi-structured, and structured with Hadoop, Azure Data Lake, Spark, SQL Server, Cosmos DB and many other data platforms.</Description>
-    <Version>4.10.0</Version>
+    <Version>4.11.0</Version>
     <AssemblyName>Microsoft.Azure.Management.DataFactory</AssemblyName>
     <PackageTags>Microsoft Azure resource management;Data Factory;ADF;</PackageTags>
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
index 9556960550e4..f45daac9c015 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Properties/AssemblyInfo.cs
@@ -6,8 +6,8 @@
[assembly: AssemblyTitle("Microsoft Azure Data Factory Management Library")]
[assembly: AssemblyDescription("Provides management functionality for Microsoft Azure Data Factory Resources.")]
-[assembly: AssemblyVersion("4.10.0.0")]
-[assembly: AssemblyFileVersion("4.10.0.0")]
+[assembly: AssemblyVersion("4.11.0.0")]
+[assembly: AssemblyFileVersion("4.11.0.0")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Microsoft Azure .NET SDK")]
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DataFlowJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DataFlowJsonSamples.cs
index a37aa53bfd05..55f8f1321ab9 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DataFlowJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/DataFlowJsonSamples.cs
@@ -92,5 +92,51 @@ public class DataFlowJsonSamples : JsonSampleCollection<DataFlowJsonSamples>
}
}
";
+
+ [JsonSample]
+ public const string MappingDataFlowWithSourceSinkStaging = @"
+ {
+ ""name"": ""exampleDataFlow"",
+ ""properties"": {
+ ""description"": ""Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."",
+ ""type"": ""MappingDataFlow"",
+ ""typeProperties"": {
+ ""sources"": [
+ {
+ ""dataset"": {
+ ""referenceName"": ""CurrencyDatasetUSD"",
+ ""type"": ""DatasetReference""
+ },
+ ""name"": ""USDCurrency"",
+ ""staging"": {
+ ""linkedService"": {
+ ""referenceName"": ""blob_store_sasToken01"",
+ ""type"": ""LinkedServiceReference""
+ },
+ ""folderPath"":""testcontainer01""
+ }
+ }
+ ],
+ ""sinks"": [
+ {
+ ""dataset"": {
+ ""referenceName"": ""USDOutput"",
+ ""type"": ""DatasetReference""
+ },
+ ""name"": ""USDSink"",
+ ""staging"": {
+ ""linkedService"": {
+ ""referenceName"": ""blob_store_sasToken02"",
+ ""type"": ""LinkedServiceReference""
+ },
+ ""folderPath"":""testcontainer02""
+ }
+ }
+ ],
+ ""script"": ""some script""
+ }
+ }
+ }
+";
}
}
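The next file adds JSON samples exercising the new azureCloudType value. A minimal model-level sketch of the same AzureBlobFS linked service, assuming `AzureBlobFSLinkedService` exposes an `AzureCloudType` property alongside its existing AAD settings (the ids and key mirror the placeholder values in the test sample):

```csharp
using Microsoft.Azure.Management.DataFactory.Models;

public static class AzureBlobFSLinkedServiceSample
{
    public static AzureBlobFSLinkedService BuildLinkedService()
    {
        return new AzureBlobFSLinkedService
        {
            Url = "https://testblobfs.dfs.core.windows.net",
            ServicePrincipalId = "9c8b1ab1-a894-4639-8fb9-75f98a36e9ab",
            ServicePrincipalKey = new SecureString("mykey"),
            Tenant = "72f988bf-86f1-41af-91ab-2d7cd011db47",
            // New in 4.11.0: selects the Azure cloud used for AAD authentication.
            AzureCloudType = "AzurePublic"
        };
    }
}
```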
diff --git a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
index 2e907ad4986c..fc970af02d2c 100644
--- a/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
+++ b/sdk/datafactory/Microsoft.Azure.Management.DataFactory/tests/JsonSamples/LinkedServiceJsonSamples.cs
@@ -11,6 +11,27 @@ namespace DataFactory.Tests.JsonSamples
     /// </summary>
     public class LinkedServiceJsonSamples : JsonSampleCollection<LinkedServiceJsonSamples>
{
+ [JsonSample]
+ public const string AzureBlobFSLinkedService = @"
+{
+ name: ""Test-Windows-Azure-storage-account-linkedService"",
+ properties:
+ {
+ type: ""AzureBlobFS"",
+ typeProperties:
+ {
+ url: ""https://testblobfs.dfs.core.windows.net"",
+ servicePrincipalId: ""9c8b1ab1-a894-4639-8fb9-75f98a36e9ab"",
+ servicePrincipalKey: {
+ type: ""SecureString"",
+ value: ""mykey""
+ },
+ tenant: ""72f988bf-86f1-41af-91ab-2d7cd011db47"",
+ azureCloudType: ""AzurePublic""
+ }
+ }
+}";
+
[JsonSample]
public const string AzureStorageLinkedService = @"
{
@@ -133,9 +154,10 @@ public class LinkedServiceJsonSamples : JsonSampleCollection<LinkedServiceJsonSamples>
{
type: ""SapOpenHubSource"",
excludeLastRequest: false,
- baseRequestId: ""123""
+ baseRequestId: ""123"",
+ customRfcReadTableFunctionModule: ""fakecustomRfcReadTableFunctionModule"",
+ sapDataColumnDelimiter: ""|""
},
sink:
{
@@ -5587,6 +5589,7 @@ public class PipelineJsonSamples : JsonSampleCollection<PipelineJsonSamples>
{
type: ""SapTableSource"",
rowCount: 3,
+ sapDataColumnDelimiter: ""|"",
partitionOption: ""PartitionOnCalendarDate"",
partitionSettings:
{