diff --git a/src/SDKs/DataFactory/DataFactory.Tests/TestData/PipelineRuns_QueryByFactory.json b/src/SDKs/DataFactory/DataFactory.Tests/TestData/PipelineRuns_QueryByFactory.json
index cbb12946813b..6f8038d886c8 100644
--- a/src/SDKs/DataFactory/DataFactory.Tests/TestData/PipelineRuns_QueryByFactory.json
+++ b/src/SDKs/DataFactory/DataFactory.Tests/TestData/PipelineRuns_QueryByFactory.json
@@ -1,69 +1,77 @@
{
- "parameters": {
- "subscriptionId": "12345678-1234-1234-1234-12345678abc",
- "resourceGroupName": "exampleResourceGroup",
- "factoryName": "exampleFactoryName",
- "filterParameters": {
- "lastUpdatedAfter": "2018-06-16T00:36:44.3345758Z",
- "lastUpdatedBefore": "2018-06-16T00:49:48.3686473Z",
- "filters": [
- {
- "operand": "PipelineName",
- "operator": "Equals",
- "values": [
- "examplePipeline"
- ]
- }
- ]
+ "parameters": {
+ "subscriptionId": "12345678-1234-1234-1234-12345678abc",
+ "resourceGroupName": "exampleResourceGroup",
+ "factoryName": "exampleFactoryName",
+ "filterParameters": {
+ "lastUpdatedAfter": "2019-02-21T23:46:23.9907022Z",
+ "lastUpdatedBefore": "2019-02-22T00:04:32.2322734Z",
+ "filters": [
+ {
+ "operand": "PipelineName",
+ "operator": "Equals",
+ "values": [
+ "examplePipeline"
+ ]
+ }
+ ]
+ },
+ "api-version": "2018-06-01"
},
- "api-version": "2018-06-01"
- },
"responses": {
"200": {
"headers": {
- "Date": "Sat, 16 Jun 2018 00:40:01 GMT",
+ "Date": "Thu, 21 Feb 2019 23:54:48 GMT",
"X-Content-Type-Options": "nosniff",
- "x-ms-ratelimit-remaining-subscription-reads": "14989",
- "x-ms-request-id": "2f955e10-c6df-45a7-97d4-81acdb8540cd",
- "x-ms-correlation-request-id": "2f955e10-c6df-45a7-97d4-81acdb8540cd"
+ "x-ms-ratelimit-remaining-subscription-reads": "11998",
+ "x-ms-request-id": "a1267e2f-f7bb-442f-b2e4-c88bd2b62557",
+ "x-ms-correlation-request-id": "a1267e2f-f7bb-442f-b2e4-c88bd2b62557"
},
"body": {
"value": [
{
- "runId": "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
+ "runId": "2d25e280-f4ac-4ceb-b446-4fbabff10c16",
+ "runGroupId": "2d25e280-f4ac-4ceb-b446-4fbabff10c16",
"pipelineName": "examplePipeline",
"parameters": {
- "OutputBlobNameList": "[\"exampleoutput.csv\"]"
+ "OutputBlobNameList": "[\r\n \"exampleoutput.csv\"\r\n]"
},
"invokedBy": {
- "id": "80a01654a9d34ad18b3fcac5d5d76b67",
- "name": "Manual"
+ "id": "d461d53360bb4dc1a9c42e223fd831cf",
+ "name": "Manual",
+ "invokedByType": "Manual"
},
- "runStart": "2018-06-16T00:37:44.6257014Z",
- "runEnd": "2018-06-16T00:38:12.7314495Z",
- "durationInMs": 28105,
+ "runStart": "2019-02-21T23:47:35.6829232Z",
+ "runEnd": "2019-02-21T23:47:55.9645047Z",
+ "durationInMs": 20281,
"status": "Succeeded",
"message": "",
- "lastUpdated": "2018-06-16T00:38:12.7314495Z",
- "annotations": []
+ "lastUpdated": "2019-02-21T23:47:55.9645047Z",
+ "annotations": [],
+ "runDimension": {},
+ "isLatest": false
},
{
- "runId": "16ac5348-ff82-4f95-a80d-638c1d47b721",
+ "runId": "52742a52-e3e2-4e49-8c2f-b777b23536e6",
+ "runGroupId": "2d25e280-f4ac-4ceb-b446-4fbabff10c16",
"pipelineName": "examplePipeline",
"parameters": {
- "OutputBlobNameList": "[\"exampleoutput.csv\"]"
+ "OutputBlobNameList": "[\r\n \"exampleoutput.csv\"\r\n]"
},
"invokedBy": {
- "id": "7c5fd7ef7e8a464b98b931cf15fcac66",
- "name": "Manual"
+ "id": "5190ab7f241c4849802dd9da1cbdd314",
+ "name": "Manual",
+ "invokedByType": "Manual"
},
- "runStart": "2018-06-16T00:39:49.2745128Z",
- "runEnd": null,
- "durationInMs": null,
- "status": "Cancelled",
+ "runStart": "2019-02-21T23:51:24.160864Z",
+ "runEnd": "2019-02-21T23:51:43.4680625Z",
+ "durationInMs": 19307,
+ "status": "Succeeded",
"message": "",
- "lastUpdated": "2018-06-16T00:39:51.216097Z",
- "annotations": []
+ "lastUpdated": "2019-02-21T23:51:43.4680625Z",
+ "annotations": [],
+ "runDimension": {},
+ "isLatest": true
}
]
}
diff --git a/src/SDKs/DataFactory/DataFactory.Tests/Utils/ExampleCapture.cs b/src/SDKs/DataFactory/DataFactory.Tests/Utils/ExampleCapture.cs
index d487aeeacb3f..3e1dbdabe74d 100644
--- a/src/SDKs/DataFactory/DataFactory.Tests/Utils/ExampleCapture.cs
+++ b/src/SDKs/DataFactory/DataFactory.Tests/Utils/ExampleCapture.cs
@@ -764,8 +764,11 @@ private string CapturePipelines_CreateRun()
{ "OutputBlobNameList", outputBlobNameArray }
};
- CreateRunResponse rtr = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, parameters: arguments);
- return rtr.RunId;
+ CreateRunResponse rtr1 = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, parameters: arguments);
+ System.Threading.Thread.Sleep(TimeSpan.FromSeconds(120));
+ CreateRunResponse rtr2 = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName,
+ isRecovery: true, referencePipelineRunId: rtr1.RunId);
+ return rtr2.RunId;
}
private void CapturePipelineRuns_Cancel()
diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/IPipelinesOperations.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/IPipelinesOperations.cs
index bcef7b1bb75f..48d598980f38 100644
--- a/src/SDKs/DataFactory/Management.DataFactory/Generated/IPipelinesOperations.cs
+++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/IPipelinesOperations.cs
@@ -158,6 +158,15 @@ public partial interface IPipelinesOperations
/// The pipeline run identifier. If run ID is specified the parameters
/// of the specified run will be used to create a new run.
/// </param>
/// <param name='isRecovery'>
/// Recovery mode flag. If recovery mode is set to true, the specified
/// referenced pipeline run and the new run will be grouped under the
/// same groupId.
/// </param>
/// <param name='startActivityName'>
/// In recovery mode, the rerun will start from this activity. If not
/// specified, all activities will run.
/// </param>
/// <param name='parameters'>
/// Parameters of the pipeline run. These parameters will be used only
/// if the runId is not specified.
@@ -177,7 +186,7 @@ public partial interface IPipelinesOperations
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
- Task<AzureOperationResponse<CreateRunResponse>> CreateRunWithHttpMessagesAsync(string resourceGroupName, string factoryName, string pipelineName, string referencePipelineRunId = default(string), IDictionary<string, object> parameters = default(IDictionary<string, object>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
+ Task<AzureOperationResponse<CreateRunResponse>> CreateRunWithHttpMessagesAsync(string resourceGroupName, string factoryName, string pipelineName, string referencePipelineRunId = default(string), bool? isRecovery = default(bool?), string startActivityName = default(string), IDictionary<string, object> parameters = default(IDictionary<string, object>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists pipelines.
/// </summary>
diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonMWSSource.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonMWSSource.cs
index 94bd9954eba5..d74a6017eea6 100644
--- a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonMWSSource.cs
+++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonMWSSource.cs
@@ -38,10 +38,13 @@ public AmazonMWSSource()
/// Source retry wait. Type: string (or
/// Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ /// The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).
/// <param name="query">A query to retrieve data from source. Type:
/// string (or Expression with resultType string).</param>
- public AmazonMWSSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object query = default(object))
- : base(additionalProperties, sourceRetryCount, sourceRetryWait)
+ public AmazonMWSSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object))
+ : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections)
{
Query = query;
CustomInit();
diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonRedshiftSource.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonRedshiftSource.cs
index 47a54fe6d738..a1f13df42fab 100644
--- a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonRedshiftSource.cs
+++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonRedshiftSource.cs
@@ -38,6 +38,9 @@ public AmazonRedshiftSource()
/// Source retry wait. Type: string (or
/// Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ /// The maximum concurrent
+ /// connection count for the source data store. Type: integer (or
+ /// Expression with resultType integer).
/// <param name="query">Database query. Type: string (or Expression
/// with resultType string).</param>
/// The Amazon S3 settings needed
@@ -45,8 +48,8 @@ public AmazonRedshiftSource()
/// unload. With this, data from Amazon Redshift source will be
/// unloaded into S3 first and then copied into the targeted sink from
/// the interim S3.
- public AmazonRedshiftSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object query = default(object), RedshiftUnloadSettings redshiftUnloadSettings = default(RedshiftUnloadSettings))
- : base(additionalProperties, sourceRetryCount, sourceRetryWait)
+ public AmazonRedshiftSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object), RedshiftUnloadSettings redshiftUnloadSettings = default(RedshiftUnloadSettings))
+ : base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections)
{
Query = query;
RedshiftUnloadSettings = redshiftUnloadSettings;
diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonS3Dataset.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonS3Dataset.cs
index eae5f6dd3d6f..028fcc579833 100644
--- a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonS3Dataset.cs
+++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AmazonS3Dataset.cs
@@ -60,16 +60,24 @@ public AmazonS3Dataset()
/// Type: string (or Expression with resultType string).
/// The version for the S3 object. Type: string
/// (or Expression with resultType string).
+ /// The start of S3 object's
+ /// modified datetime. Type: string (or Expression with resultType
+ /// string).
+ /// The end of S3 object's modified
+ /// datetime. Type: string (or Expression with resultType
+ /// string).
/// <param name="format">The format of files.</param>
/// <param name="compression">The data compression method used for the
/// Amazon S3 object.</param>
- public AmazonS3Dataset(LinkedServiceReference linkedServiceName, object bucketName, IDictionary additionalProperties = default(IDictionary), string description = default(string), object structure = default(object), object schema = default(object), IDictionary parameters = default(IDictionary), IList