diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py index ab57aa1c0bfa..864f9f3d0d34 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py @@ -23,6 +23,7 @@ from .operations.pipeline_runs_operations import PipelineRunsOperations from .operations.activity_runs_operations import ActivityRunsOperations from .operations.triggers_operations import TriggersOperations +from .operations.rerun_triggers_operations import RerunTriggersOperations from .operations.trigger_runs_operations import TriggerRunsOperations from . import models @@ -85,6 +86,8 @@ class DataFactoryManagementClient(SDKClient): :vartype activity_runs: azure.mgmt.datafactory.operations.ActivityRunsOperations :ivar triggers: Triggers operations :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations + :ivar rerun_triggers: RerunTriggers operations + :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations :ivar trigger_runs: TriggerRuns operations :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations @@ -127,5 +130,7 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.triggers = TriggersOperations( self._client, self.config, self._serialize, self._deserialize) + self.rerun_triggers = RerunTriggersOperations( + self._client, self.config, self._serialize, self._deserialize) self.trigger_runs = TriggerRunsOperations( self._client, self.config, self._serialize, self._deserialize) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index e758e9dae31f..cab29db469a7 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -64,6 +64,9 @@ from .activity_runs_query_response_py3 import ActivityRunsQueryResponse from .trigger_run_py3 import TriggerRun from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse + from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger + from .rerun_trigger_resource_py3 import RerunTriggerResource from .operation_display_py3 import OperationDisplay from .operation_log_specification_py3 import OperationLogSpecification from .operation_metric_availability_py3 import OperationMetricAvailability @@ -71,6 +74,20 @@ from .operation_metric_specification_py3 import OperationMetricSpecification from .operation_service_specification_py3 import OperationServiceSpecification from .operation_py3 import Operation + from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference_py3 import TriggerReference + from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference_py3 import TriggerDependencyReference + from .dependency_reference_py3 import DependencyReference + from .retry_policy_py3 import RetryPolicy + from .tumbling_window_trigger_py3 import TumblingWindowTrigger + from .blob_events_trigger_py3 import BlobEventsTrigger + from .blob_trigger_py3 import BlobTrigger + from .recurrence_schedule_occurrence_py3 
import RecurrenceScheduleOccurrence + from .recurrence_schedule_py3 import RecurrenceSchedule + from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence + from .schedule_trigger_py3 import ScheduleTrigger + from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .responsys_linked_service_py3 import ResponsysLinkedService from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService @@ -212,20 +229,6 @@ from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset - from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference - from .trigger_reference_py3 import TriggerReference - from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference - from .trigger_dependency_reference_py3 import TriggerDependencyReference - from .dependency_reference_py3 import DependencyReference - from .retry_policy_py3 import RetryPolicy - from .tumbling_window_trigger_py3 import TumblingWindowTrigger - from .blob_events_trigger_py3 import BlobEventsTrigger - from .blob_trigger_py3 import BlobTrigger - from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence - from .recurrence_schedule_py3 import RecurrenceSchedule - from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence - from .schedule_trigger_py3 import ScheduleTrigger - from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .activity_policy_py3 import ActivityPolicy from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity @@ -413,6 +416,9 @@ from .activity_runs_query_response import ActivityRunsQueryResponse from .trigger_run import TriggerRun from .trigger_runs_query_response import TriggerRunsQueryResponse + from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger + from .rerun_trigger_resource import RerunTriggerResource from .operation_display import OperationDisplay from .operation_log_specification import OperationLogSpecification from .operation_metric_availability import OperationMetricAvailability @@ -420,6 +426,20 @@ from .operation_metric_specification import OperationMetricSpecification from .operation_service_specification import OperationServiceSpecification from .operation import Operation + from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference import TriggerReference + from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference import TriggerDependencyReference + from .dependency_reference import DependencyReference + from .retry_policy import RetryPolicy + from .tumbling_window_trigger import TumblingWindowTrigger + from .blob_events_trigger import BlobEventsTrigger + from .blob_trigger import BlobTrigger + from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence + from .recurrence_schedule import RecurrenceSchedule + from .schedule_trigger_recurrence import ScheduleTriggerRecurrence + from .schedule_trigger import ScheduleTrigger + from .multiple_pipeline_trigger import 
MultiplePipelineTrigger from .responsys_linked_service import ResponsysLinkedService from .azure_databricks_linked_service import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService @@ -561,20 +581,6 @@ from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset from .amazon_s3_dataset import AmazonS3Dataset - from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference - from .trigger_reference import TriggerReference - from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference - from .trigger_dependency_reference import TriggerDependencyReference - from .dependency_reference import DependencyReference - from .retry_policy import RetryPolicy - from .tumbling_window_trigger import TumblingWindowTrigger - from .blob_events_trigger import BlobEventsTrigger - from .blob_trigger import BlobTrigger - from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence - from .recurrence_schedule import RecurrenceSchedule - from .schedule_trigger_recurrence import ScheduleTriggerRecurrence - from .schedule_trigger import ScheduleTrigger - from .multiple_pipeline_trigger import MultiplePipelineTrigger from .activity_policy import ActivityPolicy from .databricks_spark_python_activity import DatabricksSparkPythonActivity from .databricks_spark_jar_activity import DatabricksSparkJarActivity @@ -714,6 +720,7 @@ from .dataset_resource_paged import DatasetResourcePaged from .pipeline_resource_paged import PipelineResourcePaged from .trigger_resource_paged import TriggerResourcePaged +from .rerun_trigger_resource_paged import RerunTriggerResourcePaged from .data_factory_management_client_enums import ( IntegrationRuntimeState, IntegrationRuntimeAutoUpdate, @@ -726,6 +733,11 @@ RunQueryOrderByField, RunQueryOrder, TriggerRunStatus, + TumblingWindowFrequency, + BlobEventTypes, + DayOfWeek, + DaysOfWeek, + RecurrenceFrequency, SparkServerType, SparkThriftTransportProtocol, SparkAuthenticationType, @@ -749,11 +761,6 @@ SybaseAuthenticationType, DatasetCompressionLevel, JsonFormatFilePattern, - TumblingWindowFrequency, - BlobEventTypes, - DayOfWeek, - DaysOfWeek, - RecurrenceFrequency, WebActivityMethod, CassandraSourceReadConsistencyLevels, StoredProcedureParameterType, @@ -831,6 +838,9 @@ 'ActivityRunsQueryResponse', 'TriggerRun', 'TriggerRunsQueryResponse', + 'RerunTumblingWindowTriggerActionParameters', + 'RerunTumblingWindowTrigger', + 'RerunTriggerResource', 'OperationDisplay', 'OperationLogSpecification', 'OperationMetricAvailability', @@ -838,6 +848,20 @@ 'OperationMetricSpecification', 'OperationServiceSpecification', 'Operation', + 'SelfDependencyTumblingWindowTriggerReference', + 'TriggerReference', + 'TumblingWindowTriggerDependencyReference', + 'TriggerDependencyReference', + 'DependencyReference', + 'RetryPolicy', + 'TumblingWindowTrigger', + 'BlobEventsTrigger', + 'BlobTrigger', + 'RecurrenceScheduleOccurrence', + 'RecurrenceSchedule', + 'ScheduleTriggerRecurrence', + 'ScheduleTrigger', + 'MultiplePipelineTrigger', 'ResponsysLinkedService', 'AzureDatabricksLinkedService', 'AzureDataLakeAnalyticsLinkedService', @@ -979,20 +1003,6 @@ 'AzureTableDataset', 'AzureBlobDataset', 'AmazonS3Dataset', - 'SelfDependencyTumblingWindowTriggerReference', - 'TriggerReference', - 'TumblingWindowTriggerDependencyReference', - 'TriggerDependencyReference', - 'DependencyReference', - 'RetryPolicy', - 
'TumblingWindowTrigger', - 'BlobEventsTrigger', - 'BlobTrigger', - 'RecurrenceScheduleOccurrence', - 'RecurrenceSchedule', - 'ScheduleTriggerRecurrence', - 'ScheduleTrigger', - 'MultiplePipelineTrigger', 'ActivityPolicy', 'DatabricksSparkPythonActivity', 'DatabricksSparkJarActivity', @@ -1132,6 +1142,7 @@ 'DatasetResourcePaged', 'PipelineResourcePaged', 'TriggerResourcePaged', + 'RerunTriggerResourcePaged', 'IntegrationRuntimeState', 'IntegrationRuntimeAutoUpdate', 'ParameterType', @@ -1143,6 +1154,11 @@ 'RunQueryOrderByField', 'RunQueryOrder', 'TriggerRunStatus', + 'TumblingWindowFrequency', + 'BlobEventTypes', + 'DayOfWeek', + 'DaysOfWeek', + 'RecurrenceFrequency', 'SparkServerType', 'SparkThriftTransportProtocol', 'SparkAuthenticationType', @@ -1166,11 +1182,6 @@ 'SybaseAuthenticationType', 'DatasetCompressionLevel', 'JsonFormatFilePattern', - 'TumblingWindowFrequency', - 'BlobEventTypes', - 'DayOfWeek', - 'DaysOfWeek', - 'RecurrenceFrequency', 'WebActivityMethod', 'CassandraSourceReadConsistencyLevels', 'StoredProcedureParameterType', diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 2de647ff9d8b..aa76fe05ddc0 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -113,6 +113,51 @@ class TriggerRunStatus(str, Enum): inprogress = "Inprogress" +class TumblingWindowFrequency(str, Enum): + + minute = "Minute" + hour = "Hour" + + +class BlobEventTypes(str, Enum): + + microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" + microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + + +class DayOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class RecurrenceFrequency(str, Enum): + + not_specified = "NotSpecified" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + + class SparkServerType(str, Enum): shark_server = "SharkServer" @@ -263,51 +308,6 @@ class JsonFormatFilePattern(str, Enum): array_of_objects = "arrayOfObjects" -class TumblingWindowFrequency(str, Enum): - - minute = "Minute" - hour = "Hour" - - -class BlobEventTypes(str, Enum): - - microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" - microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" - - -class DayOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class DaysOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class RecurrenceFrequency(str, Enum): - - not_specified = "NotSpecified" - minute = "Minute" - hour = "Hour" - day = "Day" - week = "Week" - month = "Month" - year = "Year" - - class WebActivityMethod(str, Enum): get = "GET" diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py 
b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py new file mode 100644 index 000000000000..8de6a70ecc99 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. + :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, **kwargs): + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py new file mode 100644 index 000000000000..23d971c1082e --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class RerunTriggerResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`RerunTriggerResource ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} + } + + def __init__(self, *args, **kwargs): + + super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py new file mode 100644 index 000000000000..19814ad0d76f --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. + :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py new file mode 100644 index 000000000000..e66cf2feebbc --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .trigger import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.parent_trigger = kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs.get('requested_start_time', None) + self.requested_end_time = kwargs.get('requested_end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.type = 'RerunTumblingWindowTrigger' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py new file mode 100644 index 000000000000..4b87f070b6be --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py new file mode 100644 index 000000000000..6fadecca588b --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..eafc3b5743a0 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency + self.type = 'RerunTumblingWindowTrigger' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py index 0e7882159e95..398402178ae4 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -17,7 +17,8 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTrigger, MultiplePipelineTrigger + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -51,7 +52,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py index 3e232b149f0b..09fb39534be1 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -17,7 +17,8 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTrigger, MultiplePipelineTrigger + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when sending a request. 
@@ -51,7 +52,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index b6b9497ae922..a987d7ddfec7 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -19,6 +19,7 @@ from .pipeline_runs_operations import PipelineRunsOperations from .activity_runs_operations import ActivityRunsOperations from .triggers_operations import TriggersOperations +from .rerun_triggers_operations import RerunTriggersOperations from .trigger_runs_operations import TriggerRunsOperations __all__ = [ @@ -32,5 +33,6 @@ 'PipelineRunsOperations', 'ActivityRunsOperations', 'TriggersOperations', + 'RerunTriggersOperations', 'TriggerRunsOperations', ] diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py new file mode 100644 index 000000000000..58e0066a60dd --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py @@ -0,0 +1,450 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class RerunTriggersOperations(object): + """RerunTriggersOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def create( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, rerun_tumbling_window_trigger_action_parameters, custom_headers=None, raw=False, **operation_config): + """Creates a rerun trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. 
+ :type rerun_trigger_name: str + :param rerun_tumbling_window_trigger_action_parameters: Rerun tumbling + window trigger action parameters. + :type rerun_tumbling_window_trigger_action_parameters: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTriggerActionParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: TriggerResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}'} + + + def _start_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.start.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def start( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. 
+ :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'} + + + def _stop_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.stop.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = 
ClientRawResponse(None, response) + return client_raw_response + + def stop( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. + :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'} + + + def _cancel_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.cancel.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if 
self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def cancel( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Cancels a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. + :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._cancel_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'} + + def list_by_trigger( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + """Lists rerun triggers by an original trigger name. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of RerunTriggerResource + :rtype: + ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_trigger.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'}
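
For orientation, the sketch below strings the new `rerun_triggers` operation group together end to end: create a rerun trigger under an existing tumbling window trigger, start it (a long-running operation returning an `LROPoller`), enumerate the rerun triggers registered under the parent trigger, and cancel the rerun. This is a minimal usage sketch against the 2018-06-01 surface added in this diff, not an official sample; the resource names (`my-rg`, `my-factory`, `my-tw-trigger`, `my-rerun`), the subscription id, and the service-principal credentials are placeholders — substitute whatever authentication you already use.

```python
from datetime import datetime

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import RerunTumblingWindowTriggerActionParameters

# Placeholder credentials and ids -- replace with your own values/auth.
credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

rg, factory, trigger_name, rerun_name = 'my-rg', 'my-factory', 'my-tw-trigger', 'my-rerun'

# Describe the window to restate: UTC start/end plus rerun parallelism (1-50).
params = RerunTumblingWindowTriggerActionParameters(
    start_time=datetime(2018, 6, 1),
    end_time=datetime(2018, 6, 2),
    max_concurrency=10)

# PUT .../triggers/{trigger}/rerunTriggers/{rerunTrigger} -- returns a TriggerResource.
rerun = client.rerun_triggers.create(rg, factory, trigger_name, rerun_name, params)
print(rerun.name, rerun.properties.type)

# start/stop/cancel are long-running operations exposed as LROPoller instances.
client.rerun_triggers.start(rg, factory, trigger_name, rerun_name).wait()

# list_by_trigger pages through RerunTriggerResource items for the parent trigger.
for item in client.rerun_triggers.list_by_trigger(rg, factory, trigger_name):
    print(item.name, item.properties.requested_start_time, item.properties.max_concurrency)

client.rerun_triggers.cancel(rg, factory, trigger_name, rerun_name).wait()
```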
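
A note on the model-side changes: registering `'RerunTumblingWindowTrigger'` in `Trigger._subtype_map` is what lets msrest resolve the new subclass whenever a trigger payload carries that type discriminator. The fragment below is a rough sketch of that behaviour, feeding msrest's `Deserializer` an already-decoded dict outside any HTTP call; the payload values are invented for illustration.

```python
from msrest import Deserializer

from azure.mgmt.datafactory import models

# Build the same model catalogue the generated client hands to its deserializer.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

# A hand-written payload using the discriminator added to Trigger._subtype_map.
payload = {
    'type': 'RerunTumblingWindowTrigger',
    'description': 'restatement of June 1',
    'typeProperties': {
        'requestedStartTime': '2018-06-01T00:00:00Z',
        'requestedEndTime': '2018-06-02T00:00:00Z',
        'maxConcurrency': 10,
    },
}

# Deserializing against the base 'Trigger' type resolves the concrete subclass.
trigger = deserialize('Trigger', payload)
assert isinstance(trigger, models.RerunTumblingWindowTrigger)
print(trigger.requested_start_time, trigger.max_concurrency)
```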