diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst
index f4e5240e156..1c139576ba0 100644
--- a/src/datafactory/HISTORY.rst
+++ b/src/datafactory/HISTORY.rst
@@ -3,28 +3,6 @@
Release History
===============
-0.4.0
-+++++
-* GA the whole module
-
-0.3.0
-+++++
-* [BREAKING CHANGE] Renamed command subgroup `az datafactory factory` to `az datafactory`.
-* [BREAKING CHANGE] `az datafactory integration-runtime managed create`: `--type-properties-compute-properties` renamed to `--compute-properties`,
- `--type-properties-ssis-properties` renamed to `--ssis-properties`.
-* [BREAKING CHANGE] `az datafactory integration-runtime self-hosted create`: `--type-properties-linked-info` renamed to `--linked-info`.
-* [BREAKING CHANGE] `az datafactory integration-runtime update`: `--properties` renamed to `--linked-service`.
-* [BREAKING CHANGE] `az datafactory linked-service delete`: `--properties` renamed to `--dataset`.
-* [BREAKING CHANGE] `az datafactory trigger list`: `--properties` renamed to `--trigger`.
-
-0.2.1
-+++++
-* az datafactory factory create: Enable managed identity by default
-
-0.2.0
-++++++
-* add update command for linked services and triggers and datasets
-
0.1.0
++++++
* Initial release.
diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json
index 3695b0d7077..cfc30c747c7 100644
--- a/src/datafactory/azext_datafactory/azext_metadata.json
+++ b/src/datafactory/azext_datafactory/azext_metadata.json
@@ -1,3 +1,4 @@
{
+ "azext.isExperimental": true,
"azext.minCliCoreVersion": "2.15.0"
}
\ No newline at end of file
diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py
index 7db87b484da..837a01be1e4 100644
--- a/src/datafactory/azext_datafactory/generated/_client_factory.py
+++ b/src/datafactory/azext_datafactory/generated/_client_factory.py
@@ -54,3 +54,15 @@ def cf_trigger(cli_ctx, *_):
def cf_trigger_run(cli_ctx, *_):
return cf_datafactory_cl(cli_ctx).trigger_runs
+
+
+def cf_private_end_point_connection(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_end_point_connections
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_endpoint_connection
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_link_resources
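
Each helper above resolves one operation group off the shared management client. For orientation, a minimal sketch of the standard generated pattern behind `cf_datafactory_cl` (its real definition sits earlier in `_client_factory.py`; this is an approximation, not a verbatim copy):

```python
# Hedged sketch of the shared client factory these helpers build on; the
# standard azure-cli generated pattern, shown here for context only.
def cf_datafactory_cl(cli_ctx, *_):
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient
    return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient)
```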
diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py
index fd2ab1dcd0e..202eeba1c85 100644
--- a/src/datafactory/azext_datafactory/generated/_help.py
+++ b/src/datafactory/azext_datafactory/generated/_help.py
@@ -42,33 +42,14 @@
type: command
short-summary: "Create a factory."
parameters:
- - name: --factory-vsts-configuration
- short-summary: "Factory's VSTS repo information."
+ - name: --identity
+ short-summary: "User assigned identity to use to authenticate to customer's key vault. If not provided Managed \
+Service Identity will be used."
long-summary: |
- Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \
-repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX
-
- project-name: Required. VSTS project name.
- tenant-id: VSTS tenant id.
- type: Required. Type of repo configuration.
- account-name: Required. Account name.
- repository-name: Required. Repository name.
- collaboration-branch: Required. Collaboration branch.
- root-folder: Required. Root folder.
- last-commit-id: Last commit id.
- - name: --factory-git-hub-configuration
- short-summary: "Factory's GitHub repo information."
- long-summary: |
- Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \
-collaboration-branch=XX root-folder=XX last-commit-id=XX
+ Usage: --identity user-assigned-identity=XX
- host-name: GitHub Enterprise host name. For example: https://github.mydomain.com
- type: Required. Type of repo configuration.
- account-name: Required. Account name.
- repository-name: Required. Repository name.
- collaboration-branch: Required. Collaboration branch.
- root-folder: Required. Root folder.
- last-commit-id: Last commit id.
+ user-assigned-identity: The resource id of the user assigned identity to authenticate to customer's key \
+vault.
examples:
- name: Factories_CreateOrUpdate
text: |-
@@ -98,41 +79,14 @@
helps['datafactory configure-factory-repo'] = """
type: command
short-summary: "Updates a factory's repo information."
- parameters:
- - name: --factory-vsts-configuration
- short-summary: "Factory's VSTS repo information."
- long-summary: |
- Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \
-repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX
-
- project-name: Required. VSTS project name.
- tenant-id: VSTS tenant id.
- type: Required. Type of repo configuration.
- account-name: Required. Account name.
- repository-name: Required. Repository name.
- collaboration-branch: Required. Collaboration branch.
- root-folder: Required. Root folder.
- last-commit-id: Last commit id.
- - name: --factory-git-hub-configuration
- short-summary: "Factory's GitHub repo information."
- long-summary: |
- Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \
-collaboration-branch=XX root-folder=XX last-commit-id=XX
-
- host-name: GitHub Enterprise host name. For example: https://github.mydomain.com
- type: Required. Type of repo configuration.
- account-name: Required. Account name.
- repository-name: Required. Repository name.
- collaboration-branch: Required. Collaboration branch.
- root-folder: Required. Root folder.
- last-commit-id: Last commit id.
examples:
- name: Factories_ConfigureFactoryRepo
text: |-
az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-1234\
5678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \
---factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \
-repository-name="repo" root-folder="/" tenant-id="" --location "East US"
+--repo-configuration "{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collaborationBranch\\":\
+\\"master\\",\\"lastCommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\":\\"repo\\",\\"rootFolder\\"\
+:\\"/\\",\\"tenantId\\":\\"\\"}" --location "East US"
"""
helps['datafactory get-data-plane-access'] = """
@@ -149,6 +103,14 @@
helps['datafactory get-git-hub-access-token'] = """
type: command
short-summary: "Get GitHub Access Token."
+ parameters:
+ - name: --git-hub-client-secret
+ short-summary: "GitHub bring your own app client secret information."
+ long-summary: |
+ Usage: --git-hub-client-secret byoa-secret-akv-url=XX byoa-secret-name=XX
+
+ byoa-secret-akv-url: Bring your own app client secret AKV URL.
+ byoa-secret-name: Bring your own app client secret name in AKV.
examples:
- name: Factories_GetGitHubAccessToken
text: |-
@@ -206,6 +168,13 @@
helps['datafactory integration-runtime managed create'] = """
type: command
short-summary: "Create an integration runtime."
+ parameters:
+ - name: --managed-virtual-network
+ short-summary: "Managed Virtual Network reference."
+ long-summary: |
+ Usage: --managed-virtual-network reference-name=XX
+
+ reference-name: Required. Reference ManagedVirtualNetwork name.
"""
helps['datafactory integration-runtime self-hosted'] = """
@@ -286,6 +255,16 @@
"exampleIntegrationRuntime" --resource-group "exampleResourceGroup"
"""
+helps['datafactory integration-runtime list-outbound-network-dependency-endpoint'] = """
+ type: command
+ short-summary: "Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime."
+ examples:
+ - name: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints
+ text: |-
+ az datafactory integration-runtime list-outbound-network-dependency-endpoint --factory-name \
+"exampleFactoryName" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup"
+"""
+
helps['datafactory integration-runtime regenerate-auth-key'] = """
type: command
short-summary: "Regenerates the authentication key for an integration runtime."
@@ -591,7 +570,7 @@
taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\
type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\
).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \
-"0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup"
+"0.00:10:00" --pipeline-name "examplePipeline" --resource-group "exampleResourceGroup"
"""
helps['datafactory pipeline delete'] = """
@@ -902,3 +881,92 @@
az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \
"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger"
"""
+
+helps['datafactory private-end-point-connection'] = """
+ type: group
+    short-summary: Manage private end point connections with datafactory
+"""
+
+helps['datafactory private-end-point-connection list'] = """
+ type: command
+ short-summary: "Lists Private endpoint connections."
+ examples:
+ - name: privateEndPointConnections_ListByFactory
+ text: |-
+ az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection'] = """
+ type: group
+    short-summary: Manage private endpoint connections with datafactory
+"""
+
+helps['datafactory private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets a private endpoint connection."
+ examples:
+ - name: Get a private endpoint connection for a datafactory.
+ text: |-
+ az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection create'] = """
+ type: command
+ short-summary: "Approves or rejects a private endpoint connection."
+ parameters:
+ - name: --private-link-service-connection-state
+ short-summary: "The state of a private link connection"
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+            status: Status of a private link connection.
+            description: Description of a private link connection.
+            actions-required: Actions required for a private link connection.
+ examples:
+ - name: Approves or rejects a private endpoint connection for a factory.
+ text: |-
+ az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name \
+"connection" --private-link-service-connection-state description="Approved by admin." actions-required="" \
+status="Approved" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection update'] = """
+ type: command
+ short-summary: "Approves or rejects a private endpoint connection."
+ parameters:
+ - name: --private-link-service-connection-state
+ short-summary: "The state of a private link connection"
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+            status: Status of a private link connection.
+            description: Description of a private link connection.
+            actions-required: Actions required for a private link connection.
+"""
+
+helps['datafactory private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes a private endpoint connection."
+ examples:
+ - name: Delete a private endpoint connection for a datafactory.
+ text: |-
+ az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name \
+"connection" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-link-resource'] = """
+ type: group
+    short-summary: Manage private link resources with datafactory
+"""
+
+helps['datafactory private-link-resource show'] = """
+ type: command
+ short-summary: "Gets the private link resources."
+ examples:
+    - name: Get private link resources of a factory
+ text: |-
+ az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+"""
diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py
index 2162b81c231..c5d149f29fe 100644
--- a/src/datafactory/azext_datafactory/generated/_params.py
+++ b/src/datafactory/azext_datafactory/generated/_params.py
@@ -22,11 +22,13 @@
validate_file_or_dict
)
from azext_datafactory.action import (
- AddFactoryVstsConfiguration,
- AddFactoryGitHubConfiguration,
+ AddIdentity,
+ AddGitHubClientSecret,
+ AddManagedVirtualNetwork,
AddFolder,
AddFilters,
- AddOrderBy
+ AddOrderBy,
+ AddPrivateLinkServiceConnectionState
)
@@ -51,18 +53,37 @@ def load_arguments(self, _):
c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
validator=get_default_location_from_resource_group)
c.argument('tags', tags_type)
- c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS '
- 'repo information.', arg_group='RepoConfiguration')
- c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s '
- 'GitHub repo information.', arg_group='RepoConfiguration')
+ c.argument('repo_configuration', type=validate_file_or_dict, help='Git repo information of the factory. '
+ 'Expected value: json-string/@json-file.')
c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected '
'value: json-string/@json-file.')
+ c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not '
+ 'public network access is allowed for the data factory.')
+ c.argument('key_name', type=str, help='The name of the key in Azure Key Vault to use as Customer Managed Key.',
+ arg_group='Encryption')
+ c.argument('vault_base_url', type=str, help='The url of the Azure Key Vault used for CMK.',
+ arg_group='Encryption')
+ c.argument('key_version', type=str, help='The version of the key used for CMK. If not provided, latest version '
+ 'will be used.', arg_group='Encryption')
+ c.argument('identity', action=AddIdentity, nargs='+', help='User assigned identity to use to authenticate to '
+                   'customer\'s key vault. If not provided, Managed Service Identity will be used.',
+ arg_group='Encryption')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned']),
+ help='The identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for '
+ 'the factory. Expected value: json-string/@json-file.', arg_group='Identity')
with self.argument_context('datafactory update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name')
c.argument('tags', tags_type)
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned']),
+ help='The identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for '
+ 'the factory. Expected value: json-string/@json-file.', arg_group='Identity')
with self.argument_context('datafactory delete') as c:
c.argument('resource_group_name', resource_group_name_type)
@@ -72,10 +93,8 @@ def load_arguments(self, _):
with self.argument_context('datafactory configure-factory-repo') as c:
c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
c.argument('factory_resource_id', type=str, help='The factory resource id.')
- c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS '
- 'repo information.', arg_group='RepoConfiguration')
- c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s '
- 'GitHub repo information.', arg_group='RepoConfiguration')
+ c.argument('repo_configuration', type=validate_file_or_dict, help='Git repo information of the factory. '
+ 'Expected value: json-string/@json-file.')
with self.argument_context('datafactory get-data-plane-access') as c:
c.argument('resource_group_name', resource_group_name_type)
@@ -98,6 +117,8 @@ def load_arguments(self, _):
id_part='name')
c.argument('git_hub_access_code', type=str, help='GitHub access code.')
c.argument('git_hub_client_id', type=str, help='GitHub application client ID.')
+ c.argument('git_hub_client_secret', action=AddGitHubClientSecret, nargs='+', help='GitHub bring your own app '
+ 'client secret information.')
c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.')
with self.argument_context('datafactory integration-runtime list') as c:
@@ -133,6 +154,8 @@ def load_arguments(self, _):
c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for '
'update, for which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='Integration runtime description.')
+ c.argument('managed_virtual_network', action=AddManagedVirtualNetwork, nargs='+', help='Managed Virtual '
+ 'Network reference.')
c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed '
'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties')
c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration '
@@ -190,6 +213,12 @@ def load_arguments(self, _):
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.')
+ with self.argument_context('datafactory integration-runtime list-outbound-network-dependency-endpoint') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.')
+ c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
+ help='The integration runtime name.')
+
with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name')
@@ -386,8 +415,7 @@ def load_arguments(self, _):
with self.argument_context('datafactory pipeline update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name')
- c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline '
- 'name.', id_part='child_name_1')
+ c.argument('pipeline_name', type=str, help='The pipeline name.', id_part='child_name_1')
c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for '
'which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='The description of the pipeline.')
@@ -404,8 +432,7 @@ def load_arguments(self, _):
'json-string/@json-file.')
c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring '
'Metric is fired. Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric')
- c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.',
- arg_group='Folder')
+ c.argument('name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder')
c.ignore('pipeline')
with self.argument_context('datafactory pipeline delete') as c:
@@ -578,3 +605,52 @@ def load_arguments(self, _):
c.argument('factory_name', type=str, help='The factory name.', id_part='name')
c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1')
c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2')
+
+ with self.argument_context('datafactory private-end-point-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.')
+
+ with self.argument_context('datafactory private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+        c.argument('private_endpoint_connection_name',
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+                   help='The private endpoint connection name.', id_part='child_name_1')
+ c.argument('if_none_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no '
+ 'content will be returned.')
+
+ with self.argument_context('datafactory private-endpoint-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.')
+        c.argument('private_endpoint_connection_name',
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+                   help='The private endpoint connection name.')
+ c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for update, for which it should match existing entity or can be * for unconditional '
+ 'update.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='The state of a private link connection')
+
+ with self.argument_context('datafactory private-endpoint-connection update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+        c.argument('private_endpoint_connection_name',
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+                   help='The private endpoint connection name.', id_part='child_name_1')
+ c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for update, for which it should match existing entity or can be * for unconditional '
+ 'update.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='The state of a private link connection')
+
+ with self.argument_context('datafactory private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+        c.argument('private_endpoint_connection_name',
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+                   help='The private endpoint connection name.', id_part='child_name_1')
+
+ with self.argument_context('datafactory private-link-resource show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
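
Several of the new arguments (`repo_configuration`, `user_assigned_identities`, `compute_properties`, `ssis_properties`) are declared with `validate_file_or_dict`, i.e. they accept either an inline JSON string or a JSON file. A simplified approximation of that behavior, assuming the real helper in azure.cli.core also expands `~` and wraps parse failures in a friendlier error:

```python
# Simplified approximation of azure-cli's validate_file_or_dict (illustrative;
# not the real implementation).
import json
import os

def file_or_dict(value):
    path = os.path.expanduser(value)
    if os.path.exists(path):          # an existing path is read as a JSON file
        with open(path) as f:
            return json.load(f)
    return json.loads(value)          # otherwise parse as an inline JSON string

print(file_or_dict('{"type": "FactoryVSTSConfiguration", "accountName": "ADF"}'))
```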
diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py
index f645d72981a..f6f712f317f 100644
--- a/src/datafactory/azext_datafactory/generated/action.py
+++ b/src/datafactory/azext_datafactory/generated/action.py
@@ -14,10 +14,10 @@
from knack.util import CLIError
-class AddFactoryVstsConfiguration(argparse.Action):
+class AddIdentity(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
- namespace.factory_vsts_configuration = action
+ namespace.identity = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
@@ -31,32 +31,18 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
for k in properties:
kl = k.lower()
v = properties[k]
- if kl == 'project-name':
- d['project_name'] = v[0]
- elif kl == 'tenant-id':
- d['tenant_id'] = v[0]
- elif kl == 'account-name':
- d['account_name'] = v[0]
- elif kl == 'repository-name':
- d['repository_name'] = v[0]
- elif kl == 'collaboration-branch':
- d['collaboration_branch'] = v[0]
- elif kl == 'root-folder':
- d['root_folder'] = v[0]
- elif kl == 'last-commit-id':
- d['last_commit_id'] = v[0]
+ if kl == 'user-assigned-identity':
+ d['user_assigned_identity'] = v[0]
else:
- raise CLIError('Unsupported Key {} is provided for parameter factory_vsts_configuration. All possible '
- 'keys are: project-name, tenant-id, account-name, repository-name, '
- 'collaboration-branch, root-folder, last-commit-id'.format(k))
- d['type'] = 'FactoryVSTSConfiguration'
+ raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: '
+ 'user-assigned-identity'.format(k))
return d
-class AddFactoryGitHubConfiguration(argparse.Action):
+class AddGitHubClientSecret(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
- namespace.factory_git_hub_configuration = action
+ namespace.git_hub_client_secret = action
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
@@ -70,23 +56,39 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
for k in properties:
kl = k.lower()
v = properties[k]
- if kl == 'host-name':
- d['host_name'] = v[0]
- elif kl == 'account-name':
- d['account_name'] = v[0]
- elif kl == 'repository-name':
- d['repository_name'] = v[0]
- elif kl == 'collaboration-branch':
- d['collaboration_branch'] = v[0]
- elif kl == 'root-folder':
- d['root_folder'] = v[0]
- elif kl == 'last-commit-id':
- d['last_commit_id'] = v[0]
+ if kl == 'byoa-secret-akv-url':
+ d['byoa_secret_akv_url'] = v[0]
+ elif kl == 'byoa-secret-name':
+ d['byoa_secret_name'] = v[0]
else:
- raise CLIError('Unsupported Key {} is provided for parameter factory_git_hub_configuration. All '
- 'possible keys are: host-name, account-name, repository-name, collaboration-branch, '
- 'root-folder, last-commit-id'.format(k))
- d['type'] = 'FactoryGitHubConfiguration'
+ raise CLIError('Unsupported Key {} is provided for parameter git_hub_client_secret. All possible keys '
+ 'are: byoa-secret-akv-url, byoa-secret-name'.format(k))
+ return d
+
+
+class AddManagedVirtualNetwork(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.managed_virtual_network = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ d['type'] = "ManagedVirtualNetworkReference"
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'reference-name':
+ d['reference_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter managed_virtual_network. All possible '
+ 'keys are: reference-name'.format(k))
return d
@@ -111,7 +113,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
d['name'] = v[0]
else:
raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'.
- format(k))
+ format(k))
return d
@@ -169,3 +171,32 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
raise CLIError('Unsupported Key {} is provided for parameter order_by. All possible keys are: '
'order-by, order'.format(k))
return d
+
+
+class AddPrivateLinkServiceConnectionState(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.private_link_service_connection_state = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'status':
+ d['status'] = v[0]
+ elif kl == 'description':
+ d['description'] = v[0]
+ elif kl == 'actions-required':
+ d['actions_required'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
+ 'All possible keys are: status, description, actions-required'.format(k))
+ return d
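
All of the new actions share one parsing technique: split each token on the first `=`, collect values per key, and map kebab-case keys onto snake_case dict entries. A self-contained sketch of that technique (simplified: it uses `parser.error` where the generated actions raise `CLIError`, and the allowed key set is hard-coded for illustration):

```python
import argparse
from collections import defaultdict

class AddConnectionState(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        try:
            properties = defaultdict(list)
            for k, v in (x.split('=', 1) for x in values):
                properties[k.lower()].append(v)
        except ValueError:
            parser.error('usage error: {} [KEY=VALUE ...]'.format(option_string))
        allowed = {'status', 'description', 'actions-required'}
        unknown = set(properties) - allowed
        if unknown:
            parser.error('unsupported keys: {}'.format(', '.join(sorted(unknown))))
        # kebab-case CLI keys become snake_case dict keys; the first value wins
        setattr(namespace, self.dest,
                {k.replace('-', '_'): v[0] for k, v in properties.items()})

parser = argparse.ArgumentParser()
parser.add_argument('--private-link-service-connection-state',
                    action=AddConnectionState, nargs='+')
args = parser.parse_args(['--private-link-service-connection-state',
                          'status=Approved', 'description=Approved by admin.',
                          'actions-required='])
print(args.private_link_service_connection_state)
# -> {'status': 'Approved', 'description': 'Approved by admin.', 'actions_required': ''}
```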
diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py
index 83b7f9db34e..eb938fdd4eb 100644
--- a/src/datafactory/azext_datafactory/generated/commands.py
+++ b/src/datafactory/azext_datafactory/generated/commands.py
@@ -20,7 +20,7 @@ def load_command_table(self, _):
operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio'
'ns.{}',
client_factory=cf_factory)
- with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory) as g:
+ with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory, is_experimental=True) as g:
g.custom_command('list', 'datafactory_list')
g.custom_show_command('show', 'datafactory_show')
g.custom_command('create', 'datafactory_create')
@@ -49,6 +49,8 @@ def load_command_table(self, _):
g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data')
g.custom_command('get-status', 'datafactory_integration_runtime_get_status')
g.custom_command('list-auth-key', 'datafactory_integration_runtime_list_auth_key')
+ g.custom_command('list-outbound-network-dependency-endpoint', 'datafactory_integration_runtime_list_outbound_ne'
+ 'twork_dependency_endpoint')
g.custom_command('regenerate-auth-key', 'datafactory_integration_runtime_regenerate_auth_key')
g.custom_command('remove-link', 'datafactory_integration_runtime_remove_link')
g.custom_command('start', 'datafactory_integration_runtime_start', supports_no_wait=True)
@@ -157,3 +159,33 @@ def load_command_table(self, _):
g.custom_command('cancel', 'datafactory_trigger_run_cancel')
g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory')
g.custom_command('rerun', 'datafactory_trigger_run_rerun')
+
+ from azext_datafactory.generated._client_factory import cf_private_end_point_connection
+ datafactory_private_end_point_connection = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_end_point_connections_operatio'
+ 'ns#PrivateEndPointConnectionsOperations.{}',
+ client_factory=cf_private_end_point_connection)
+ with self.command_group('datafactory private-end-point-connection', datafactory_private_end_point_connection,
+ client_factory=cf_private_end_point_connection) as g:
+ g.custom_command('list', 'datafactory_private_end_point_connection_list')
+
+ from azext_datafactory.generated._client_factory import cf_private_endpoint_connection
+ datafactory_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_endpoint_connection_operations'
+ '#PrivateEndpointConnectionOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('datafactory private-endpoint-connection', datafactory_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_show_command('show', 'datafactory_private_endpoint_connection_show')
+ g.custom_command('create', 'datafactory_private_endpoint_connection_create')
+ g.custom_command('update', 'datafactory_private_endpoint_connection_update')
+ g.custom_command('delete', 'datafactory_private_endpoint_connection_delete', confirmation=True)
+
+ from azext_datafactory.generated._client_factory import cf_private_link_resource
+ datafactory_private_link_resource = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_link_resources_operations#Priv'
+ 'ateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('datafactory private-link-resource', datafactory_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_show_command('show', 'datafactory_private_link_resource_show')
diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py
index c269c1999ff..0a16a161bf7 100644
--- a/src/datafactory/azext_datafactory/generated/custom.py
+++ b/src/datafactory/azext_datafactory/generated/custom.py
@@ -11,7 +11,6 @@
# pylint: disable=too-many-lines
# pylint: disable=unused-argument
-from knack.util import CLIError
from azure.cli.core.util import sdk_no_wait
@@ -37,25 +36,29 @@ def datafactory_create(client,
if_match=None,
location=None,
tags=None,
- factory_vsts_configuration=None,
- factory_git_hub_configuration=None,
- global_parameters=None):
- all_repo_configuration = []
- if factory_vsts_configuration is not None:
- all_repo_configuration.append(factory_vsts_configuration)
- if factory_git_hub_configuration is not None:
- all_repo_configuration.append(factory_git_hub_configuration)
- if len(all_repo_configuration) > 1:
- raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for '
- 'repo_configuration!')
- repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None
+ repo_configuration=None,
+ global_parameters=None,
+ public_network_access=None,
+ key_name=None,
+ vault_base_url=None,
+ key_version=None,
+ identity=None,
+ type_=None,
+ user_assigned_identities=None):
factory = {}
factory['location'] = location
factory['tags'] = tags
factory['repo_configuration'] = repo_configuration
factory['global_parameters'] = global_parameters
+ factory['public_network_access'] = public_network_access
factory['encryption'] = {}
+ factory['encryption']['key_name'] = key_name
+ factory['encryption']['vault_base_url'] = vault_base_url
+ factory['encryption']['key_version'] = key_version
+ factory['encryption']['identity'] = identity
factory['identity'] = {}
+ factory['identity']['type'] = type_
+ factory['identity']['user_assigned_identities'] = user_assigned_identities
return client.create_or_update(resource_group_name=resource_group_name,
factory_name=factory_name,
if_match=if_match,
@@ -65,10 +68,14 @@ def datafactory_create(client,
def datafactory_update(client,
resource_group_name,
factory_name,
- tags=None):
+ tags=None,
+ type_=None,
+ user_assigned_identities=None):
factory_update_parameters = {}
factory_update_parameters['tags'] = tags
factory_update_parameters['identity'] = {}
+ factory_update_parameters['identity']['type'] = type_
+ factory_update_parameters['identity']['user_assigned_identities'] = user_assigned_identities
return client.update(resource_group_name=resource_group_name,
factory_name=factory_name,
factory_update_parameters=factory_update_parameters)
@@ -84,17 +91,7 @@ def datafactory_delete(client,
def datafactory_configure_factory_repo(client,
location,
factory_resource_id=None,
- factory_vsts_configuration=None,
- factory_git_hub_configuration=None):
- all_repo_configuration = []
- if factory_vsts_configuration is not None:
- all_repo_configuration.append(factory_vsts_configuration)
- if factory_git_hub_configuration is not None:
- all_repo_configuration.append(factory_git_hub_configuration)
- if len(all_repo_configuration) > 1:
- raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for '
- 'repo_configuration!')
- repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None
+ repo_configuration=None):
factory_repo_update = {}
factory_repo_update['factory_resource_id'] = factory_resource_id
factory_repo_update['repo_configuration'] = repo_configuration
@@ -126,10 +123,12 @@ def datafactory_get_git_hub_access_token(client,
factory_name,
git_hub_access_code,
git_hub_access_token_base_url,
- git_hub_client_id=None):
+ git_hub_client_id=None,
+ git_hub_client_secret=None):
git_hub_access_token_request = {}
git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code
git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id
+ git_hub_access_token_request['git_hub_client_secret'] = git_hub_client_secret
git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url
return client.get_git_hub_access_token(resource_group_name=resource_group_name,
factory_name=factory_name,
@@ -179,12 +178,14 @@ def datafactory_integration_runtime_managed_create(client,
integration_runtime_name,
if_match=None,
description=None,
+ managed_virtual_network=None,
compute_properties=None,
ssis_properties=None):
integration_runtime = {}
integration_runtime['properties'] = {}
integration_runtime['properties']['type'] = 'Managed'
integration_runtime['properties']['description'] = description
+ integration_runtime['properties']['managed_virtual_network'] = managed_virtual_network
integration_runtime['properties']['compute_properties'] = compute_properties
integration_runtime['properties']['ssis_properties'] = ssis_properties
return client.create_or_update(resource_group_name=resource_group_name,
@@ -273,6 +274,15 @@ def datafactory_integration_runtime_list_auth_key(client,
integration_runtime_name=integration_runtime_name)
+def datafactory_integration_runtime_list_outbound_network_dependency_endpoint(client,
+ resource_group_name,
+ factory_name,
+ integration_runtime_name):
+ return client.list_outbound_network_dependencies_endpoints(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ integration_runtime_name=integration_runtime_name)
+
+
def datafactory_integration_runtime_regenerate_auth_key(client,
resource_group_name,
factory_name,
@@ -566,7 +576,7 @@ def datafactory_pipeline_update(instance,
annotations=None,
run_dimensions=None,
duration=None,
- folder_name=None):
+ name=None):
if description is not None:
instance.description = description
if activities is not None:
@@ -583,8 +593,8 @@ def datafactory_pipeline_update(instance,
instance.run_dimensions = run_dimensions
if duration is not None:
instance.elapsed_time_metric.duration = duration
- if folder_name is not None:
- instance.folder.name = folder_name
+ if name is not None:
+ instance.folder.name = name
return instance
@@ -841,3 +851,69 @@ def datafactory_trigger_run_rerun(client,
factory_name=factory_name,
trigger_name=trigger_name,
run_id=run_id)
+
+
+def datafactory_private_end_point_connection_list(client,
+ resource_group_name,
+ factory_name):
+ return client.list_by_factory(resource_group_name=resource_group_name,
+ factory_name=factory_name)
+
+
+def datafactory_private_endpoint_connection_show(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_none_match=None):
+ return client.get(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_none_match=if_none_match)
+
+
+def datafactory_private_endpoint_connection_create(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_match=None,
+ private_link_service_connection_state=None):
+ private_endpoint_wrapper = {}
+ private_endpoint_wrapper['properties'] = {}
+    private_endpoint_wrapper['properties']['private_link_service_connection_state'] = \
+        private_link_service_connection_state
+ return client.create_or_update(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_match=if_match,
+ private_endpoint_wrapper=private_endpoint_wrapper)
+
+
+def datafactory_private_endpoint_connection_update(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_match=None,
+ private_link_service_connection_state=None):
+ private_endpoint_wrapper = {}
+ private_endpoint_wrapper['properties'] = {}
+    private_endpoint_wrapper['properties']['private_link_service_connection_state'] = \
+        private_link_service_connection_state
+ return client.create_or_update(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_match=if_match,
+ private_endpoint_wrapper=private_endpoint_wrapper)
+
+
+def datafactory_private_endpoint_connection_delete(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def datafactory_private_link_resource_show(client,
+ resource_group_name,
+ factory_name):
+ return client.get(resource_group_name=resource_group_name,
+ factory_name=factory_name)
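
For reference, the wrapper assembled by the create/update helpers above has the following shape when the `--private-link-service-connection-state` shorthand is used. Keys are inferred from `custom.py` and `action.py` in this diff; the vendored SDK is assumed to serialize them into the ARM `properties.privateLinkServiceConnectionState` payload:

```python
# Shape of the body passed to client.create_or_update by the helpers above
# (values taken from the help example; inferred, not a verbatim SDK payload).
private_endpoint_wrapper = {
    'properties': {
        'private_link_service_connection_state': {
            'status': 'Approved',
            'description': 'Approved by admin.',
            'actions_required': '',
        }
    }
}
```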
diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py
index 42222d4e576..317e8bd0f72 100644
--- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py
+++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py
@@ -77,8 +77,9 @@ def step_configure_factory_repo(test, rg, checks=None):
test.cmd('az datafactory configure-factory-repo '
'--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto'
'ry/factories/{myFactory}" '
- '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" '
- 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" '
+ '--repo-configuration "{{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collabor'
+ 'ationBranch\\":\\"master\\",\\"lastCommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\'
+ '":\\"repo\\",\\"rootFolder\\":\\"/\\",\\"tenantId\\":\\"\\"}}" '
'--location "East US"',
checks=checks)
@@ -232,6 +233,18 @@ def step_integration_runtime_list(test, rg, checks=None):
checks=checks)
+# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_OutboundNetworkDependenciesEndpoints
+@try_manual
+def step_integration_runtime_list2(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory integration-runtime list-outbound-network-dependency-endpoint '
+ '--factory-name "{myFactory}" '
+ '--name "{myIntegrationRuntime}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
# EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update
@try_manual
def step_integration_runtime_update(test, rg, checks=None):
@@ -581,7 +594,7 @@ def step_pipeline_update(test, rg, checks=None):
'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" '
'--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" '
'--duration "0.00:10:00" '
- '--name "{myPipeline}" '
+ '--pipeline-name "{myPipeline}" '
'--resource-group "{rg}"',
checks=checks)
@@ -634,6 +647,66 @@ def step_pipeline_delete(test, rg, checks=None):
checks=checks)
+# EXAMPLE: /privateEndPointConnections/get/privateEndPointConnections_ListByFactory
+@try_manual
+def step_private_end_point_connection_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-end-point-connection list '
+ '--factory-name "{myFactory}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/put/Approves or rejects a private endpoint connection for a factory.
+@try_manual
+def step_private_endpoint_connection_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection create '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--private-link-service-connection-state description="Approved by admin." actions-required="" '
+ 'status="Approved" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/get/Get a private endpoint connection for a datafactory.
+@try_manual
+def step_private_endpoint_connection_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection show '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/delete/Delete a private endpoint connection for a datafactory.
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection delete -y '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /privateLinkResources/get/Get private link resources of a site
+@try_manual
+def step_private_link_resource_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-link-resource show '
+ '--factory-name "{myFactory}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
# EXAMPLE: /Triggers/put/Triggers_Create
@try_manual
def step_trigger_create(test, rg, checks=None):
diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
index 517a35650f8..d6795dbdb8b 100644
--- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
+++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
@@ -174,6 +174,7 @@ def __init__(self, *args, **kwargs):
'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14),
'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15),
'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14),
+ 'myPrivateEndPointConnection': 'connection',
})
@ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg')
diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md
deleted file mode 100644
index b7eabe4528a..00000000000
--- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md
+++ /dev/null
@@ -1,48 +0,0 @@
-|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|
-|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033|
-|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707|
-|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706|
-|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579|
-|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 09:05:23.317043|
-|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047|
-|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751|
-|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057|
-|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053|
-|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099|
-|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096|
-|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815|
-|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292|
-|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517|
-|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030|
-|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138|
-|step_integration_runtime_list|successed||||2021-04-26 09:05:25.115003|2021-04-26 09:05:25.253055|
-|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635|
-|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704|
-|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865|
-|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871|
-|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042|
-|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042|
-|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196|
-|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194|
-|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307|
-|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000|
-|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002|
-|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513|
-|step_integration_runtime_get_status|successed||||2021-04-26 09:05:26.935512|2021-04-26 09:05:27.069511|
-|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487|
-|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802|
-|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807|
-|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136|
-|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873|
-|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072|
-|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426|
-|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944|
-|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878|
-|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991|
-|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337|
-|step_linked_service_delete|successed||||2021-04-26 09:05:28.861337|2021-04-26 09:05:28.989612|
-|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507|
-|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119|
-|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925|
-|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579|
-Coverage: 46/46
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
index f272437a3e9..9dfe04b82d0 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
@@ -107,7 +107,6 @@ def __init__(
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
- self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py
deleted file mode 100644
index 411d6c4a66e..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, TYPE_CHECKING
-
-from azure.core.configuration import Configuration
-from azure.core.pipeline import policies
-from azure.mgmt.core.policies import ARMHttpLoggingPolicy
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from azure.core.credentials_async import AsyncTokenCredential
-
-VERSION = "unknown"
-
-class DataFactoryManagementClientConfiguration(Configuration):
- """Configuration for DataFactoryManagementClient.
-
- Note that all parameters used to create this instance are saved as instance
- attributes.
-
- :param credential: Credential needed for the client to connect to Azure.
- :type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The subscription identifier.
- :type subscription_id: str
- """
-
- def __init__(
- self,
- credential: "AsyncTokenCredential",
- subscription_id: str,
- **kwargs: Any
- ) -> None:
- if credential is None:
- raise ValueError("Parameter 'credential' must not be None.")
- if subscription_id is None:
- raise ValueError("Parameter 'subscription_id' must not be None.")
- super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
-
- self.credential = credential
- self.subscription_id = subscription_id
- self.api_version = "2018-06-01"
- self.credential_scopes = ['https://management.azure.com/.default']
- self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
- kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION))
- self._configure(**kwargs)
-
- def _configure(
- self,
- **kwargs: Any
- ) -> None:
- self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
- self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
- self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
- self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
- self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
- self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
- self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
- self.authentication_policy = kwargs.get('authentication_policy')
- if self.credential and not self.authentication_policy:
- self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
index 255a1839c21..01497b56d61 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
@@ -104,7 +104,6 @@ def __init__(
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
- self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py
deleted file mode 100644
index b2b322686b8..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, Optional, TYPE_CHECKING
-
-from azure.mgmt.core import AsyncARMPipelineClient
-from msrest import Deserializer, Serializer
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from azure.core.credentials_async import AsyncTokenCredential
-
-from ._configuration_async import DataFactoryManagementClientConfiguration
-from .operations_async import OperationOperations
-from .operations_async import FactoryOperations
-from .operations_async import ExposureControlOperations
-from .operations_async import IntegrationRuntimeOperations
-from .operations_async import IntegrationRuntimeObjectMetadataOperations
-from .operations_async import IntegrationRuntimeNodeOperations
-from .operations_async import LinkedServiceOperations
-from .operations_async import DatasetOperations
-from .operations_async import PipelineOperations
-from .operations_async import PipelineRunOperations
-from .operations_async import ActivityRunOperations
-from .operations_async import TriggerOperations
-from .operations_async import TriggerRunOperations
-from .operations_async import DataFlowOperations
-from .operations_async import DataFlowDebugSessionOperations
-from .operations_async import ManagedVirtualNetworkOperations
-from .operations_async import ManagedPrivateEndpointOperations
-from .. import models
-
-
-class DataFactoryManagementClient(object):
- """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.
-
- :ivar operation: OperationOperations operations
- :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations
- :ivar factory: FactoryOperations operations
- :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations
- :ivar exposure_control: ExposureControlOperations operations
- :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations
- :ivar integration_runtime: IntegrationRuntimeOperations operations
- :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations
- :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations
- :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations
- :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations
- :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations
- :ivar linked_service: LinkedServiceOperations operations
- :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations
- :ivar dataset: DatasetOperations operations
- :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations
- :ivar pipeline: PipelineOperations operations
- :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations
- :ivar pipeline_run: PipelineRunOperations operations
- :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations
- :ivar activity_run: ActivityRunOperations operations
- :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations
- :ivar trigger: TriggerOperations operations
- :vartype trigger: data_factory_management_client.aio.operations_async.TriggerOperations
- :ivar trigger_run: TriggerRunOperations operations
- :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations
- :ivar data_flow: DataFlowOperations operations
- :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations
- :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations
- :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations
- :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations
- :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations
- :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations
- :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations
- :param credential: Credential needed for the client to connect to Azure.
- :type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The subscription identifier.
- :type subscription_id: str
- :param str base_url: Service URL
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- """
-
- def __init__(
- self,
- credential: "AsyncTokenCredential",
- subscription_id: str,
- base_url: Optional[str] = None,
- **kwargs: Any
- ) -> None:
- if not base_url:
- base_url = 'https://management.azure.com'
- self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs)
- self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
- self._serialize = Serializer(client_models)
- self._deserialize = Deserializer(client_models)
-
- self.operation = OperationOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.factory = FactoryOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.exposure_control = ExposureControlOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime = IntegrationRuntimeOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime_node = IntegrationRuntimeNodeOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.linked_service = LinkedServiceOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.dataset = DatasetOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.pipeline = PipelineOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.pipeline_run = PipelineRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.activity_run = ActivityRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.trigger = TriggerOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.trigger_run = TriggerRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_flow = DataFlowOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_flow_debug_session = DataFlowDebugSessionOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.managed_virtual_network = ManagedVirtualNetworkOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.managed_private_endpoint = ManagedPrivateEndpointOperations(
- self._client, self._config, self._serialize, self._deserialize)
-
- async def close(self) -> None:
- await self._client.close()
-
- async def __aenter__(self) -> "DataFactoryManagementClient":
- await self._client.__aenter__()
- return self
-
- async def __aexit__(self, *exc_details) -> None:
- await self._client.__aexit__(*exc_details)
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py
index 6b27efc1819..36c8e43bae0 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py
@@ -455,6 +455,68 @@ async def get_status(
return deserialized
 get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore

+ async def list_outbound_network_dependencies_endpoints(
+ self,
+ resource_group_name: str,
+ factory_name: str,
+ integration_runtime_name: str,
+ **kwargs
+ ) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse":
+ """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param factory_name: The factory name.
+ :type factory_name: str
+ :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response)
+ :rtype: ~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2018-06-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+ 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore
+
async def get_connection_info(
self,
resource_group_name: str,
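For reference, a minimal sketch of calling the operation added above through the regenerated async client. The operation-group attribute name (`integration_runtimes`), the import path, and all resource names are assumptions for illustration, not taken from this diff:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    # Import path is illustrative; the vendored package location may differ.
    from data_factory_management_client.aio import DataFactoryManagementClient

    async def main() -> None:
        credential = DefaultAzureCredential()
        # The client supports "async with" and closes its pipeline on exit.
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # New operation: lists outbound endpoints an Azure-SSIS IR depends on.
            endpoints = await client.integration_runtimes.list_outbound_network_dependencies_endpoints(
                resource_group_name="my-rg",            # assumed name
                factory_name="my-factory",              # assumed name
                integration_runtime_name="my-ssis-ir",  # assumed name
            )
            print(endpoints)
        await credential.close()

    asyncio.run(main())
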
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py
deleted file mode 100644
index 554e3ba9232..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from ._operation_operations_async import OperationOperations
-from ._factory_operations_async import FactoryOperations
-from ._exposure_control_operations_async import ExposureControlOperations
-from ._integration_runtime_operations_async import IntegrationRuntimeOperations
-from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations
-from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations
-from ._linked_service_operations_async import LinkedServiceOperations
-from ._dataset_operations_async import DatasetOperations
-from ._pipeline_operations_async import PipelineOperations
-from ._pipeline_run_operations_async import PipelineRunOperations
-from ._activity_run_operations_async import ActivityRunOperations
-from ._trigger_operations_async import TriggerOperations
-from ._trigger_run_operations_async import TriggerRunOperations
-from ._data_flow_operations_async import DataFlowOperations
-from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations
-from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations
-from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations
-
-__all__ = [
- 'OperationOperations',
- 'FactoryOperations',
- 'ExposureControlOperations',
- 'IntegrationRuntimeOperations',
- 'IntegrationRuntimeObjectMetadataOperations',
- 'IntegrationRuntimeNodeOperations',
- 'LinkedServiceOperations',
- 'DatasetOperations',
- 'PipelineOperations',
- 'PipelineRunOperations',
- 'ActivityRunOperations',
- 'TriggerOperations',
- 'TriggerRunOperations',
- 'DataFlowOperations',
- 'DataFlowDebugSessionOperations',
- 'ManagedVirtualNetworkOperations',
- 'ManagedPrivateEndpointOperations',
-]
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py
deleted file mode 100644
index 0d2e56be08b..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ActivityRunOperations:
- """ActivityRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def query_by_pipeline_run(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.ActivityRunsQueryResponse":
- """Query activity runs based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy option.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ActivityRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_pipeline_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore
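The module above is deleted as part of renaming `operations_async` to `operations`; assuming the regenerated package keeps an equivalent activity-runs group (exposed here as `activity_runs`, an assumed attribute name), a minimal sketch of the same query against an existing pipeline run id:

    from datetime import datetime, timedelta, timezone

    async def list_recent_activity_runs(client, run_id: str):
        # Query activity runs updated in the last 24 hours for one pipeline run.
        now = datetime.now(timezone.utc)
        result = await client.activity_runs.query_by_pipeline_run(
            resource_group_name="my-rg",    # assumed name
            factory_name="my-factory",      # assumed name
            run_id=run_id,
            last_updated_after=now - timedelta(days=1),
            last_updated_before=now,
        )
        # 'value' is the result page from the generated query-response model.
        return result.value
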
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py
deleted file mode 100644
index f1bf8ee8f73..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py
+++ /dev/null
@@ -1,551 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowDebugSessionOperations:
- """DataFlowDebugSessionOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def _create_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- compute_type: Optional[str] = None,
- core_count: Optional[int] = None,
- time_to_live: Optional[int] = None,
- name: Optional[str] = None,
- properties: Optional["models.IntegrationRuntime"] = None,
- **kwargs
- ) -> Optional["models.CreateDataFlowDebugSessionResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self._create_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- response_headers = {}
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response)
-
- if response.status_code == 202:
- response_headers['location']=self._deserialize('str', response.headers.get('location'))
-
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore
-
- async def begin_create(
- self,
- resource_group_name: str,
- factory_name: str,
- compute_type: Optional[str] = None,
- core_count: Optional[int] = None,
- time_to_live: Optional[int] = None,
- name: Optional[str] = None,
- properties: Optional["models.IntegrationRuntime"] = None,
- **kwargs
- ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]:
- """Creates a data flow debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param compute_type: Compute type of the cluster. The value will be overwritten by the same
- setting in integration runtime if provided.
- :type compute_type: str
- :param core_count: Core count of the cluster. The value will be overwritten by the same setting
- in integration runtime if provided.
- :type core_count: int
- :param time_to_live: Time to live setting of the cluster in minutes.
- :type time_to_live: int
- :param name: The resource name.
- :type name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._create_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- compute_type=compute_type,
- core_count=core_count,
- time_to_live=time_to_live,
- name=name,
- properties=properties,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]:
- """Query all active data flow debug sessions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.post(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore
-
- async def add_data_flow(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- datasets: Optional[List["models.DatasetDebugResource"]] = None,
- linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None,
- source_settings: Optional[List["models.DataFlowSourceSetting"]] = None,
- parameters: Optional[Dict[str, object]] = None,
- dataset_parameters: Optional[object] = None,
- folder_path: Optional[object] = None,
- reference_name: Optional[str] = None,
- name: Optional[str] = None,
- properties: Optional["models.DataFlow"] = None,
- **kwargs
- ) -> "models.AddDataFlowToDebugSessionResponse":
- """Add a data flow into debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :param datasets: List of datasets.
- :type datasets: list[~data_factory_management_client.models.DatasetDebugResource]
- :param linked_services: List of linked services.
- :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource]
- :param source_settings: Source setting for data flow debug.
- :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting]
- :param parameters: Data flow parameters.
- :type parameters: dict[str, object]
- :param dataset_parameters: Parameters for dataset.
- :type dataset_parameters: object
- :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType
- string).
- :type folder_path: object
- :param reference_name: Reference LinkedService name.
- :type reference_name: str
- :param name: The resource name.
- :type name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AddDataFlowToDebugSessionResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.add_data_flow.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DataFlowDebugPackage')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- **kwargs
- ) -> None:
- """Deletes a data flow debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore
-
- async def _execute_command_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
- command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
- **kwargs
- ) -> Optional["models.DataFlowDebugCommandResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self._execute_command_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- response_headers = {}
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
- if response.status_code == 202:
- response_headers['location']=self._deserialize('str', response.headers.get('location'))
-
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
- _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore
-
- async def begin_execute_command(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
- command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
- **kwargs
- ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]:
- """Execute a data flow debug command.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :param command: The command type.
- :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType
- :param command_payload: The command payload object.
- :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._execute_command_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- session_id=session_id,
- command=command,
- command_payload=command_payload,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore
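Both `begin_create` and `begin_execute_command` in the module above follow the standard azure-core long-running-operation pattern: the coroutine returns an `AsyncLROPoller`, which polls the 202 response (via `AsyncARMPolling` unless `polling` is overridden) and whose `result()` awaits completion. A minimal sketch of consuming such a poller, with the group attribute name `data_flow_debug_sessions` and all argument values assumed:

    async def start_debug_session(client) -> str:
        poller = await client.data_flow_debug_sessions.begin_create(
            resource_group_name="my-rg",  # assumed name
            factory_name="my-factory",    # assumed name
            compute_type="General",       # assumed value
            core_count=8,
            time_to_live=60,              # minutes
        )
        # result() polls until the LRO completes and deserializes the body.
        response = await poller.result()
        # session_id field assumed from the session id used by delete().
        return response.session_id
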
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
deleted file mode 100644
index b5c2e5656ce..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowOperations:
- """DataFlowOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- properties: "models.DataFlow",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.DataFlowResource":
- """Creates or updates a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :param if_match: ETag of the data flow entity. Should only be specified for update, for which
- it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- data_flow = models.DataFlowResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(data_flow, 'DataFlowResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.DataFlowResource":
- """Gets a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- **kwargs
- ) -> None:
- """Deletes a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.DataFlowListResponse"]:
- """Lists data flows.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either DataFlowListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('DataFlowListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore
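# A minimal usage sketch for the removed async DataFlowOperations. The
# package name comes from the docstrings above; the client class name, the
# `.aio` export path, and the `data_flows` attribute are assumptions that
# may differ in a regenerated vendored SDK.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from data_factory_management_client.aio import DataFactoryManagementClient  # assumed export

async def print_data_flows(subscription_id: str, resource_group: str, factory: str) -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, subscription_id) as client:
            # list_by_factory returns an AsyncItemPaged that fetches next pages transparently.
            async for flow in client.data_flows.list_by_factory(resource_group, factory):
                print(flow.name)

if __name__ == "__main__":
    asyncio.run(print_data_flows("<subscription-id>", "my-rg", "my-factory"))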
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
deleted file mode 100644
index a8be0369365..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
+++ /dev/null
@@ -1,311 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DatasetOperations:
- """DatasetOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.DatasetListResponse"]:
- """Lists datasets.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either DatasetListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('DatasetListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- properties: "models.Dataset",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.DatasetResource":
- """Creates or updates a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param properties: Dataset properties.
- :type properties: ~data_factory_management_client.models.Dataset
- :param if_match: ETag of the dataset entity. Should only be specified for update, for which it
- should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- dataset = models.DatasetResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(dataset, 'DatasetResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
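# create_or_update above wraps the polymorphic Dataset payload in a
# DatasetResource envelope before the PUT. A sketch under the assumption
# that the vendored models include AzureBlobDataset and that the client
# exposes the group as `datasets`; every name and path below is a placeholder.
from data_factory_management_client import models

async def create_blob_dataset(client, resource_group: str, factory: str):
    properties = models.AzureBlobDataset(
        linked_service_name=models.LinkedServiceReference(reference_name="<linked-service>"),
        folder_path="container/input",
    )
    # The operation builds models.DatasetResource(properties=properties) itself,
    # so callers pass only the inner Dataset payload.
    return await client.datasets.create_or_update(
        resource_group, factory, "my-dataset", properties=properties,
    )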
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.DatasetResource"]:
- """Gets a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- **kwargs
- ) -> None:
- """Deletes a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
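# The `get` above admits HTTP 304 and returns None when `if_none_match`
# matches the server-side ETag, which supports simple cache revalidation.
# `cached` is any previously fetched DatasetResource (assumed to carry an
# `etag` attribute); the `datasets` attribute name is also an assumption.
async def refresh_dataset(client, resource_group: str, factory: str, name: str, cached):
    fresh = await client.datasets.get(
        resource_group, factory, name, if_none_match=cached.etag,
    )
    # None means HTTP 304: the cached copy is still current.
    return cached if fresh is None else fresh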
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
deleted file mode 100644
index b20acb1c3c8..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ExposureControlOperations:
- """ExposureControlOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def get_feature_value(
- self,
- location_id: str,
- feature_name: Optional[str] = None,
- feature_type: Optional[str] = None,
- **kwargs
- ) -> "models.ExposureControlResponse":
- """Get exposure control feature for specific location.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param feature_name: The feature name.
- :type feature_name: str
- :param feature_type: The feature type.
- :type feature_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_feature_value.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore
-
- async def get_feature_value_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- feature_name: Optional[str] = None,
- feature_type: Optional[str] = None,
- **kwargs
- ) -> "models.ExposureControlResponse":
- """Get exposure control feature for specific factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param feature_name: The feature name.
- :type feature_name: str
- :param feature_type: The feature type.
- :type feature_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_feature_value_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore
-
- async def query_feature_value_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- exposure_control_requests: List["models.ExposureControlRequest"],
- **kwargs
- ) -> "models.ExposureControlBatchResponse":
- """Get list of exposure control features for specific factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param exposure_control_requests: List of exposure control features.
- :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlBatchResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_feature_value_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore
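# The three operations above rebuild their request models from flattened
# keyword arguments (e.g. ExposureControlRequest from feature_name and
# feature_type) before serializing the POST body. A sketch of the batch
# query, with placeholder feature values and an assumed `exposure_control`
# attribute name:
from data_factory_management_client import models

async def query_features(client, resource_group: str, factory: str) -> None:
    requests = [
        models.ExposureControlRequest(feature_name="<feature-name>", feature_type="Feature"),
    ]
    batch = await client.exposure_control.query_feature_value_by_factory(
        resource_group, factory, exposure_control_requests=requests,
    )
    # One response per request; the field name below is an assumption.
    for item in batch.exposure_control_responses:
        print(item.feature_name, item.value)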
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
deleted file mode 100644
index 46f37c1a6f7..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
+++ /dev/null
@@ -1,658 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class FactoryOperations:
- """FactoryOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs
- ) -> AsyncIterable["models.FactoryListResponse"]:
- """Lists factories under the specified subscription.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- async def configure_factory_repo(
- self,
- location_id: str,
- factory_resource_id: Optional[str] = None,
- repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
- **kwargs
- ) -> "models.Factory":
- """Updates a factory's repo information.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param factory_resource_id: The factory resource id.
- :type factory_resource_id: str
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.configure_factory_repo.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore
-
- def list_by_resource_group(
- self,
- resource_group_name: str,
- **kwargs
- ) -> AsyncIterable["models.FactoryListResponse"]:
- """Lists factories.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_resource_group.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- if_match: Optional[str] = None,
- location: Optional[str] = None,
- tags: Optional[Dict[str, str]] = None,
- identity: Optional["models.FactoryIdentity"] = None,
- repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
- global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None,
- **kwargs
- ) -> "models.Factory":
- """Creates or updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_match: ETag of the factory entity. Should only be specified for update, for which it
- should match existing entity or can be * for unconditional update.
- :type if_match: str
- :param location: The resource location.
- :type location: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :param global_parameters: List of parameters for factory.
- :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory, 'Factory')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
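# The flattened signature above reassembles models.Factory(location=...,
# tags=..., identity=..., ...) before the PUT, and the docstring notes that
# If-Match '*' performs an unconditional update. A sketch, assuming the
# client exposes the group as `factories`:
async def upsert_factory(client, resource_group: str, factory: str):
    return await client.factories.create_or_update(
        resource_group, factory,
        if_match="*",          # only meaningful when updating an existing factory
        location="eastus",
        tags={"env": "dev"},
    )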
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- tags: Optional[Dict[str, str]] = None,
- identity: Optional["models.FactoryIdentity"] = None,
- **kwargs
- ) -> "models.Factory":
- """Updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.Factory"]:
- """Gets a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> None:
- """Deletes a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def get_git_hub_access_token(
- self,
- resource_group_name: str,
- factory_name: str,
- git_hub_access_code: str,
- git_hub_access_token_base_url: str,
- git_hub_client_id: Optional[str] = None,
- **kwargs
- ) -> "models.GitHubAccessTokenResponse":
- """Get GitHub Access Token.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param git_hub_access_code: GitHub access code.
- :type git_hub_access_code: str
- :param git_hub_access_token_base_url: GitHub access token base URL.
- :type git_hub_access_token_base_url: str
- :param git_hub_client_id: GitHub application client ID.
- :type git_hub_client_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: GitHubAccessTokenResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_git_hub_access_token.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore
-
- async def get_data_plane_access(
- self,
- resource_group_name: str,
- factory_name: str,
- permissions: Optional[str] = None,
- access_resource_path: Optional[str] = None,
- profile_name: Optional[str] = None,
- start_time: Optional[str] = None,
- expire_time: Optional[str] = None,
- **kwargs
- ) -> "models.AccessPolicyResponse":
- """Get Data Plane access.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param permissions: The string with permissions for Data Plane access. Currently only 'r' is
- supported, which grants read-only access.
- :type permissions: str
- :param access_resource_path: The resource path to get access to, relative to the factory.
- Currently only the empty string is supported, which corresponds to the factory resource.
- :type access_resource_path: str
- :param profile_name: The name of the profile. Currently only the default is supported. The
- default value is DefaultProfile.
- :type profile_name: str
- :param start_time: Start time for the token. If not specified the current time will be used.
- :type start_time: str
- :param expire_time: Expiration time for the token. The maximum duration for the token is
- eight hours; by default the token expires in eight hours.
- :type expire_time: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccessPolicyResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AccessPolicyResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_data_plane_access.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(policy, 'UserAccessPolicy')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AccessPolicyResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore
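A minimal sketch of how the async `get_data_plane_access` operation deleted above would be called. The module path is inferred from the docstring namespaces; the client class name, the `factories` operation-group attribute, and the `access_token` field on `AccessPolicyResponse` are assumptions, not confirmed by this diff:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
# Assumed import path; the docstrings only confirm the
# data_factory_management_client namespace.
from data_factory_management_client.aio import DataFactoryManagementClient


async def main():
    async with DefaultAzureCredential() as credential:
        client = DataFactoryManagementClient(credential, "<subscription-id>")
        # Request a read-only data plane token for the factory itself:
        # 'r' and the empty resource path are the only supported values.
        policy = await client.factories.get_data_plane_access(  # attribute name assumed
            resource_group_name="my-rg",
            factory_name="myfactory",
            permissions="r",
            access_resource_path="",
            profile_name="DefaultProfile",
        )
        print(policy.access_token)  # field name assumed on AccessPolicyResponse


asyncio.run(main())
```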
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py
deleted file mode 100644
index a6022196653..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py
+++ /dev/null
@@ -1,301 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeNodeOperations:
- """IntegrationRuntimeNodeOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> "models.SelfHostedIntegrationRuntimeNode":
- """Gets a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> None:
- """Deletes a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- concurrent_jobs_limit: Optional[int] = None,
- **kwargs
- ) -> "models.SelfHostedIntegrationRuntimeNode":
- """Updates a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration
- runtime node. Values between 1 and maxConcurrentJobs (inclusive) are allowed.
- :type concurrent_jobs_limit: int
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def get_ip_address(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeNodeIpAddress":
- """Get the IP address of self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_ip_address.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore
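Likewise, a hedged sketch of driving the removed node operations, reusing the `client` from the sketch above; the `integration_runtime_nodes` attribute name is an assumption:

```python
async def throttle_and_locate_node(client):
    # Cap concurrent jobs on one self-hosted IR node; the value must lie
    # between 1 and the node's maxConcurrentJobs, inclusive.
    node = await client.integration_runtime_nodes.update(  # attribute name assumed
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-shir",
        node_name="Node_1",
        concurrent_jobs_limit=4,
    )
    # POST .../nodes/{nodeName}/ipAddress returns the node's IP address.
    ip = await client.integration_runtime_nodes.get_ip_address(
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-shir",
        node_name="Node_1",
    )
    return node, ip
```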
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py
deleted file mode 100644
index 70df0716c21..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeObjectMetadataOperations:
- """IntegrationRuntimeObjectMetadataOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def _refresh_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> Optional["models.SsisObjectMetadataStatusResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._refresh_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
-
- async def begin_refresh(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]:
- """Refresh a SSIS integration runtime object metadata.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._refresh_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- metadata_path: Optional[str] = None,
- **kwargs
- ) -> "models.SsisObjectMetadataListResponse":
- """Get a SSIS integration runtime object metadata by specified path. The return is pageable
- metadata list.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param metadata_path: Metadata path.
- :type metadata_path: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SsisObjectMetadataListResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if get_metadata_request is not None:
- body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: ignore
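A sketch of the object-metadata flow removed above: `begin_refresh` is a long-running operation, and `get` posts a `GetSsisObjectMetadataRequest`. The `integration_runtime_object_metadata` attribute name and the sample path are assumptions:

```python
async def refresh_and_browse_ssis_metadata(client):
    # Awaiting begin_refresh yields an AsyncLROPoller; result() waits for
    # the final SsisObjectMetadataStatusResponse.
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-ssis-ir",
    )
    status = await poller.result()
    # metadata_path is optional and scopes the listing; omit it for the root.
    listing = await client.integration_runtime_object_metadata.get(
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-ssis-ir",
        metadata_path="folder1",  # hypothetical path
    )
    return status, listing
```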
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
deleted file mode 100644
index 82b285c7a74..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
+++ /dev/null
@@ -1,1176 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeOperations:
- """IntegrationRuntimeOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]:
- """Lists integration runtimes.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either IntegrationRuntimeListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- properties: "models.IntegrationRuntime",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeResource":
- """Creates or updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
- :param if_match: ETag of the integration runtime entity. Should only be specified for update,
- for which it should match the existing entity, or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- integration_runtime = models.IntegrationRuntimeResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.IntegrationRuntimeResource"]:
- """Gets an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param if_none_match: ETag of the integration runtime entity. Should only be specified for get.
- If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
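The `get` deleted above maps a 304 (ETag unchanged) to a plain `None` return, which supports a simple caching pattern. A sketch under the same client assumptions as before:

```python
async def get_if_changed(client, last_seen_etag):
    # With If-None-Match set, an unchanged entity comes back as None
    # (see the 200/304 handling above), so the caller can keep its cache.
    resource = await client.integration_runtimes.get(  # attribute name assumed
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-ir",
        if_none_match=last_seen_etag,
    )
    if resource is None:
        print("integration runtime unchanged; reusing cached copy")
    return resource
```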
-
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None,
- update_delay_offset: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeResource":
- """Updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
- runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
- :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
- :param update_delay_offset: The time offset within the day (e.g., PT03H is 3 hours) at which
- the integration runtime auto-update will happen.
- :type update_delay_offset: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
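A small sketch of the PATCH shown above, scheduling self-hosted IR auto-update three hours into the day; the enum string "On" is an assumed `IntegrationRuntimeAutoUpdate` value:

```python
async def schedule_auto_update(client):
    return await client.integration_runtimes.update(  # attribute name assumed
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-shir",
        auto_update="On",            # assumed IntegrationRuntimeAutoUpdate value
        update_delay_offset="PT03H",  # auto-update runs 3 hours into the day
    )
```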
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Deletes an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- async def get_status(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeStatusResponse":
- """Gets detailed status information for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore
-
- async def get_connection_info(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeConnectionInfo":
- """Gets the on-premises integration runtime connection information for encrypting the on-premises
- data source credentials.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeConnectionInfo, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_connection_info.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore
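Both monitoring calls removed above are plain POSTs with no body; a sketch under the same assumptions:

```python
async def inspect_runtime(client):
    status = await client.integration_runtimes.get_status(  # attribute name assumed
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-shir",
    )
    conn = await client.integration_runtimes.get_connection_info(
        resource_group_name="my-rg",
        factory_name="myfactory",
        integration_runtime_name="my-shir",
    )
    return status, conn
```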
-
- async def regenerate_auth_key(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeAuthKeys":
- """Regenerates the authentication key for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param key_name: The name of the authentication key to regenerate.
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.regenerate_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore
-
- async def list_auth_key(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeAuthKeys":
- """Retrieves the authentication keys for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.list_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore
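
# Sketch: rotating one auth key and confirming only that key changed. Assumes
# `client` is the vendored async DataFactoryManagementClient from the example
# above and that "authKey2" is a valid IntegrationRuntimeAuthKeyName value.
async def rotate_auth_key(client, rg, factory, ir):
    old = await client.integration_runtime.list_auth_key(rg, factory, ir)
    new = await client.integration_runtime.regenerate_auth_key(
        rg, factory, ir, key_name="authKey2")
    assert old.auth_key1 == new.auth_key1  # the non-regenerated key is unchanged
    return new
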
-
- async def _start_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> Optional["models.IntegrationRuntimeStatusResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- async def begin_start(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]:
- """Starts a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
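
# Sketch: driving the start LRO. In the async client, awaiting `begin_start`
# yields an AsyncLROPoller whose `result()` resolves to the status response;
# `begin_stop` below follows the same shape but resolves to None. The attribute
# path `properties.state` is assumed from the model name.
async def start_ir(client, rg, factory, ir):
    poller = await client.integration_runtime.begin_start(rg, factory, ir)
    token = poller.continuation_token()  # persist to resume via continuation_token=
    status = await poller.result()
    return status.properties.state
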
-
- async def _stop_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- async def begin_stop(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Stops a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- async def sync_credentials(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Force the integration runtime to synchronize credentials across integration runtime nodes, and
- this will override the credentials across all worker nodes with those available on the
- dispatcher node. If you already have the latest credential backup file, you should manually
- import it (preferred) on any self-hosted integration runtime node than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.sync_credentials.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore
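
# Sketch: API-driven credential sync from the dispatcher node to all workers.
# Per the docstring above, manually importing a credential backup is preferred
# when one is available; this call is the fallback.
async def force_credential_sync(client, rg, factory, ir):
    await client.integration_runtime.sync_credentials(rg, factory, ir)
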
-
- async def get_monitoring_data(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeMonitoringData":
- """Get the integration runtime monitoring data, which includes the monitor data for all the nodes
- under this integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
-
- async def upgrade(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Upgrade self-hosted integration runtime to latest version if availability.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- async def remove_link(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- linked_factory_name: str,
- **kwargs
- ) -> None:
- """Remove all linked integration runtimes under specific data factory in a self-hosted integration
- runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.remove_link.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore
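
# Sketch: detaching every linked integration runtime a given factory registered
# against this shared self-hosted runtime. Only the linked factory's name is
# sent in the request body, mirroring the method body above.
async def unlink_factory(client, rg, factory, ir, linked_factory):
    await client.integration_runtime.remove_link(
        rg, factory, ir, linked_factory_name=linked_factory)
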
-
- async def create_linked_integration_runtime(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- name: Optional[str] = None,
- subscription_id: Optional[str] = None,
- data_factory_name: Optional[str] = None,
- data_factory_location: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeStatusResponse":
- """Create a linked integration runtime entry in a shared integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param name: The name of the linked integration runtime.
- :type name: str
- :param subscription_id: The ID of the subscription that the linked integration runtime belongs
- to.
- :type subscription_id: str
- :param data_factory_name: The name of the data factory that the linked integration runtime
- belongs to.
- :type data_factory_name: str
- :param data_factory_location: The location of the data factory that the linked integration
- runtime belongs to.
- :type data_factory_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_linked_integration_runtime.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore
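
# Sketch: registering a linked integration runtime entry in a shared runtime.
# The four optional fields are packed into a CreateLinkedIntegrationRuntimeRequest
# before the POST, as the method body above shows; values here are placeholders.
async def link_runtime(client, rg, factory, shared_ir):
    status = await client.integration_runtime.create_linked_integration_runtime(
        rg, factory, shared_ir,
        name="linked-ir",
        subscription_id="<consumer-subscription-id>",
        data_factory_name="consumer-factory",
        data_factory_location="westus2",
    )
    return status.name
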
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py
deleted file mode 100644
index 56e9e6f663a..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class LinkedServiceOperations:
- """LinkedServiceOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.LinkedServiceListResponse"]:
- """Lists linked services.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore
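
# Sketch: consuming the AsyncItemPaged returned by `list_by_factory`. Note the
# method itself is not awaited; pages are fetched lazily during `async for`.
# The `linked_service` operation-group attribute name is an assumption.
async def print_linked_services(client, rg, factory):
    async for ls in client.linked_service.list_by_factory(rg, factory):
        print(ls.name, type(ls.properties).__name__)
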
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- properties: "models.LinkedService",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.LinkedServiceResource":
- """Creates or updates a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param properties: Properties of linked service.
- :type properties: ~data_factory_management_client.models.LinkedService
-        :param if_match: ETag of the linkedService entity. Should only be specified for update, for
-         which it should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_service = models.LinkedServiceResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_service, 'LinkedServiceResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
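
# Sketch: optimistic-concurrency update via If-Match. The ETag captured on read
# is replayed so the PUT fails if the entity changed in between. The vendored
# `models` import path and the AzureBlobStorageLinkedService model name are
# assumptions; any concrete LinkedService subtype would do.
from data_factory_management_client import models

async def update_linked_service(client, rg, factory, name):
    current = await client.linked_service.get(rg, factory, name)
    props = models.AzureBlobStorageLinkedService(
        connection_string="<connection-string>")
    return await client.linked_service.create_or_update(
        rg, factory, name, properties=props, if_match=current.etag)
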
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.LinkedServiceResource"]:
- """Gets a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param if_none_match: ETag of the linked service entity. Should only be specified for get. If
- the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
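
# Sketch: conditional GET. When the cached ETag still matches, the service
# answers 304 and the method returns None, which is why the return annotation
# above is Optional["models.LinkedServiceResource"].
async def fetch_if_changed(client, rg, factory, name, cached_etag):
    resource = await client.linked_service.get(
        rg, factory, name, if_none_match=cached_etag)
    return resource  # None means unchanged since cached_etag
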
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- **kwargs
- ) -> None:
- """Deletes a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py
deleted file mode 100644
index 3a899779963..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py
+++ /dev/null
@@ -1,336 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedPrivateEndpointOperations:
- """ManagedPrivateEndpointOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- **kwargs
- ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]:
- """Lists managed private endpoints.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- if_match: Optional[str] = None,
- connection_state: Optional["models.ConnectionStateProperties"] = None,
- fqdns: Optional[List[str]] = None,
- group_id: Optional[str] = None,
- private_link_resource_id: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedPrivateEndpointResource":
- """Creates or updates a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
-        :param if_match: ETag of the managed private endpoint entity. Should only be specified for
-         update, for which it should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :param connection_state: The managed private endpoint connection state.
- :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties
- :param fqdns: Fully qualified domain names.
- :type fqdns: list[str]
-        :param group_id: The groupId to which the managed private endpoint connects.
-        :type group_id: str
-        :param private_link_resource_id: The ARM resource ID of the resource to which the managed
-         private endpoint connects.
- :type private_link_resource_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
-
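
For reference, a minimal sketch of driving the create_or_update operation above, assuming ``client`` is an already-constructed instance of the vendored async management client and that the operation group hangs off it as ``client.managed_private_endpoint`` (the attribute name, resource names, and the storage-account ID below are all hypothetical)::

    async def create_blob_endpoint(client):
        # group_id selects the private-link sub-resource (here: blob storage);
        # private_link_resource_id points at the hypothetical target resource.
        mpe = await client.managed_private_endpoint.create_or_update(
            resource_group_name="example-rg",
            factory_name="examplefactory",
            managed_virtual_network_name="default",
            managed_private_endpoint_name="blob-endpoint",
            group_id="blob",
            private_link_resource_id=(
                "/subscriptions/00000000-0000-0000-0000-000000000000"
                "/resourceGroups/example-rg/providers/Microsoft.Storage"
                "/storageAccounts/examplestorage"
            ),
        )
        print(mpe.name)
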
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedPrivateEndpointResource":
- """Gets a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- **kwargs
- ) -> None:
- """Deletes a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py
deleted file mode 100644
index 2152988d7ef..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py
+++ /dev/null
@@ -1,255 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedVirtualNetworkOperations:
- """ManagedVirtualNetworkOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]:
- """Lists managed Virtual Networks.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either ManagedVirtualNetworkListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore
-
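
A sketch of consuming the pager returned by list_by_factory, under the same hypothetical ``client`` as above; the call itself is not awaited, since paging is deferred until iteration::

    async def print_managed_vnets(client, resource_group_name, factory_name):
        # Returns an AsyncItemPaged; `async for` yields individual resources
        # and follows next_link between pages transparently.
        pager = client.managed_virtual_network.list_by_factory(
            resource_group_name, factory_name
        )
        async for vnet in pager:
            print(vnet.name, vnet.etag)
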
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- properties: "models.ManagedVirtualNetwork",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedVirtualNetworkResource":
- """Creates or updates a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param properties: Managed Virtual Network properties.
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork
- :param if_match: ETag of the managed Virtual Network entity. Should only be specified for an
- update, in which case it must match the existing entity's ETag, or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
-
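
The operation wraps the bare properties model into a ManagedVirtualNetworkResource itself (see the body construction above), so callers pass only the inner model. A sketch, reusing the ``models`` alias the operation group exposes, and assuming the model needs no required arguments::

    async def ensure_default_vnet(client, resource_group_name, factory_name):
        models = client.managed_virtual_network.models
        return await client.managed_virtual_network.create_or_update(
            resource_group_name,
            factory_name,
            managed_virtual_network_name="default",  # conventional name, assumed
            properties=models.ManagedVirtualNetwork(),
        )
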
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedVirtualNetworkResource":
- """Gets a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py
deleted file mode 100644
index 83206d77039..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class OperationOperations:
- """OperationOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs
- ) -> AsyncIterable["models.OperationListResponse"]:
- """Lists the available Azure Data Factory API operations.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either OperationListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('OperationListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py
deleted file mode 100644
index 34c7453f951..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py
+++ /dev/null
@@ -1,405 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineOperations:
- """PipelineOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.PipelineListResponse"]:
- """Lists pipelines.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either PipelineListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('PipelineListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- pipeline: "models.PipelineResource",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.PipelineResource":
- """Creates or updates a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param pipeline: Pipeline resource definition.
- :type pipeline: ~data_factory_management_client.models.PipelineResource
- :param if_match: ETag of the pipeline entity. Should only be specified for an update, in which
- case it must match the existing entity's ETag, or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(pipeline, 'PipelineResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
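
A sketch of using if_match for optimistic concurrency on the pipeline PUT above: read the entity, mutate it, and condition the write on the ETag from the read so a concurrent writer surfaces as an HTTP error rather than a silent overwrite (the mutated attribute on the fetched resource is assumed)::

    async def update_description(client, resource_group_name, factory_name, name):
        existing = await client.pipeline.get(resource_group_name, factory_name, name)
        existing.description = "updated via conditional PUT"  # assumed attribute
        return await client.pipeline.create_or_update(
            resource_group_name,
            factory_name,
            name,
            pipeline=existing,
            if_match=existing.etag,
        )
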
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.PipelineResource"]:
- """Gets a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
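
Because 304 is an accepted status above and only a 200 body is deserialized, a matching if_none_match makes this get return None. A sketch of a conditional re-read, under the same hypothetical ``client``::

    async def fetch_if_changed(client, resource_group_name, factory_name, name, etag):
        pipeline = await client.pipeline.get(
            resource_group_name, factory_name, name, if_none_match=etag
        )
        if pipeline is None:
            print("unchanged since last read")  # server answered 304
        else:
            print("new etag:", pipeline.etag)
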
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- **kwargs
- ) -> None:
- """Deletes a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- async def create_run(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- reference_pipeline_run_id: Optional[str] = None,
- is_recovery: Optional[bool] = None,
- start_activity_name: Optional[str] = None,
- start_from_failure: Optional[bool] = None,
- parameters: Optional[Dict[str, object]] = None,
- **kwargs
- ) -> "models.CreateRunResponse":
- """Creates a run of a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param reference_pipeline_run_id: The pipeline run identifier. If a run ID is specified, the
- parameters of the specified run will be used to create the new run.
- :type reference_pipeline_run_id: str
- :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified
- referenced pipeline run and the new run will be grouped under the same groupId.
- :type is_recovery: bool
- :param start_activity_name: In recovery mode, the rerun will start from this activity. If not
- specified, all activities will run.
- :type start_activity_name: str
- :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed
- activities. The property will be used only if startActivityName is not specified.
- :type start_from_failure: bool
- :param parameters: Parameters of the pipeline run. These parameters will be used only if the
- runId is not specified.
- :type parameters: dict[str, object]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: CreateRunResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.CreateRunResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if reference_pipeline_run_id is not None:
- query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str')
- if is_recovery is not None:
- query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool')
- if start_activity_name is not None:
- query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str')
- if start_from_failure is not None:
- query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if parameters is not None:
- body_content = self._serialize.body(parameters, '{object}')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('CreateRunResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore
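
A sketch of the two create_run modes documented above: a fresh run with parameters, then a recovery rerun that restarts only the failed activities of that run (pipeline and parameter names are hypothetical; per the docstring, parameters are ignored when a reference run ID is given)::

    async def run_then_recover(client, resource_group_name, factory_name):
        first = await client.pipeline.create_run(
            resource_group_name, factory_name, "examplePipeline",
            parameters={"outputBlobName": "out.csv"},
        )
        # If the first run fails partway, rerun only the failed activities,
        # grouped with the original run under the same groupId.
        rerun = await client.pipeline.create_run(
            resource_group_name, factory_name, "examplePipeline",
            reference_pipeline_run_id=first.run_id,
            is_recovery=True,
            start_from_failure=True,
        )
        return rerun.run_id
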
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py
deleted file mode 100644
index 5cdfd09fe01..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineRunOperations:
- """PipelineRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.PipelineRunsQueryResponse":
- """Query pipeline runs in the factory based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for the first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore
-
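
A sketch of a filtered query against the operation above, listing failed runs from the last day; the filter and order-by field values are assumed to be valid enum strings for the generated models::

    import datetime

    async def failed_runs_last_day(client, resource_group_name, factory_name):
        models = client.pipeline_run.models  # alias exposed by the operation group
        now = datetime.datetime.utcnow()
        page = await client.pipeline_run.query_by_factory(
            resource_group_name,
            factory_name,
            last_updated_after=now - datetime.timedelta(days=1),
            last_updated_before=now,
            filters=[models.RunQueryFilter(
                operand="Status", operator="Equals", values=["Failed"])],
            order_by=[models.RunQueryOrderBy(order_by="RunEnd", order="DESC")],
        )
        for run in page.value:
            print(run.run_id, run.status)
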
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- **kwargs
- ) -> "models.PipelineRun":
- """Get a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRun, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRun
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRun', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore
-
- async def cancel(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- is_recursive: Optional[bool] = None,
- **kwargs
- ) -> None:
- """Cancel a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param is_recursive: If true, cancel all the child pipelines that are triggered by the current
- pipeline.
- :type is_recursive: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- if is_recursive is not None:
- query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool')
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore
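
Finally, a sketch of the recursive cancel described above; with is_recursive=True the service also cancels child pipeline runs triggered by this run (again assuming the hypothetical ``client.pipeline_run`` operation group)::

    async def cancel_run_tree(client, resource_group_name, factory_name, run_id):
        await client.pipeline_run.cancel(
            resource_group_name, factory_name, run_id, is_recursive=True
        )
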
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
deleted file mode 100644
index f4669b45bc2..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
+++ /dev/null
@@ -1,877 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerOperations:
- """TriggerOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.TriggerListResponse"]:
- """Lists triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either TriggerListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('TriggerListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- continuation_token_parameter: Optional[str] = None,
- parent_trigger_name: Optional[str] = None,
- **kwargs
- ) -> "models.TriggerQueryResponse":
- """Query triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun
- triggers.
- :type parent_trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- properties: "models.Trigger",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.TriggerResource":
- """Creates or updates a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param properties: Properties of the trigger.
- :type properties: ~data_factory_management_client.models.Trigger
- :param if_match: ETag of the trigger entity. Should only be specified for update, for which it
- should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- trigger = models.TriggerResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(trigger, 'TriggerResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.TriggerResource"]:
- """Gets a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- """Deletes a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def _subscribe_to_event_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> Optional["models.TriggerSubscriptionOperationStatus"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._subscribe_to_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- async def begin_subscribe_to_event(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]:
- """Subscribe event trigger to events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for AsyncARMPolling, False for no polling, or a
- polling object for a custom polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._subscribe_to_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True:
-     polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = AsyncNoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- async def get_event_subscription_status(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> "models.TriggerSubscriptionOperationStatus":
- """Get a trigger's event subscription status.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerSubscriptionOperationStatus, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_event_subscription_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore
-
- async def _unsubscribe_from_event_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> Optional["models.TriggerSubscriptionOperationStatus"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- async def begin_unsubscribe_from_event(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]:
- """Unsubscribe event trigger from events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for AsyncARMPolling, False for no polling, or a
- polling object for a custom polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._unsubscribe_from_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True:
-     polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = AsyncNoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- async def _start_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- async def begin_start(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Starts a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for AsyncARMPolling, False for no polling, or a
- polling object for a custom polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True:
-     polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = AsyncNoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- async def _stop_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
-
- async def begin_stop(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Stops a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for AsyncARMPolling, False for no polling, or a
- polling object for a custom polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True:
-     polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = AsyncNoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
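All four long-running trigger operations deleted above (begin_subscribe_to_event, begin_unsubscribe_from_event, begin_start, begin_stop) follow the same AsyncLROPoller pattern: an `_initial` call issues the POST, then the poller drives the operation to completion. A minimal sketch of a caller, assuming the public `azure-mgmt-datafactory` aio client exposes the same method names as this vendored surface:

```python
# Hedged sketch: start a trigger and wait for the long-running operation.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def start_trigger() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            poller = await client.triggers.begin_start(
                "my-resource-group", "myfactory", "mytrigger"
            )
            await poller.result()  # returns once the trigger is started

asyncio.run(start_trigger())
```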
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py
deleted file mode 100644
index 3401f9c95c1..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerRunOperations:
- """TriggerRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def rerun(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- run_id: str,
- **kwargs
- ) -> None:
- """Rerun single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The trigger run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.rerun.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore
-
- async def cancel(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- run_id: str,
- **kwargs
- ) -> None:
- """Cancel a single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The trigger run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.TriggerRunsQueryResponse":
- """Query trigger runs.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore
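The trigger-run operations deleted above flatten the query body into keyword arguments; the public SDK keeps the same queryTriggerRuns endpoint but takes a single RunFilterParameters body. A hedged sketch against that public surface, with placeholder names:

```python
# Hedged sketch: list yesterday's runs of one trigger.
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import RunFilterParameters, RunQueryFilter

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
now = datetime.datetime.now(datetime.timezone.utc)

response = client.trigger_runs.query_by_factory(
    "my-resource-group",
    "myfactory",
    RunFilterParameters(
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[RunQueryFilter(operand="TriggerName", operator="Equals", values=["mytrigger"])],
    ),
)
for run in response.value:
    print(run.trigger_run_id, run.status)
```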
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
index 1f1ab102631..d558e88e00d 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
@@ -157,6 +157,9 @@
from ._models_py3 import CreateDataFlowDebugSessionResponse
from ._models_py3 import CreateLinkedIntegrationRuntimeRequest
from ._models_py3 import CreateRunResponse
+ from ._models_py3 import Credential
+ from ._models_py3 import CredentialReference
+ from ._models_py3 import CredentialResource
from ._models_py3 import CustomActivity
from ._models_py3 import CustomActivityReferenceObject
from ._models_py3 import CustomDataSourceLinkedService
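The Credential, CredentialReference, and CredentialResource models imported here back the new factory-level credential items. A hedged sketch of constructing one and referencing it, with names taken from the public azure-mgmt-datafactory model surface:

```python
# Hedged sketch: a user-assigned managed identity credential plus the
# reference object a linked service would use to point at it by name.
from azure.mgmt.datafactory.models import CredentialReference, ManagedIdentityCredential

credential = ManagedIdentityCredential(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                "Microsoft.ManagedIdentity/userAssignedIdentities/<identity>"
)

cred_ref = CredentialReference(reference_name="my-credential")
```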
@@ -277,6 +280,7 @@
from ._models_py3 import GetSsisObjectMetadataRequest
from ._models_py3 import GitHubAccessTokenRequest
from ._models_py3 import GitHubAccessTokenResponse
+ from ._models_py3 import GitHubClientSecret
from ._models_py3 import GlobalParameterSpecification
from ._models_py3 import GoogleAdWordsLinkedService
from ._models_py3 import GoogleAdWordsObjectDataset
@@ -336,6 +340,10 @@
from ._models_py3 import IntegrationRuntimeMonitoringData
from ._models_py3 import IntegrationRuntimeNodeIpAddress
from ._models_py3 import IntegrationRuntimeNodeMonitoringData
+ from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint
+ from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpoint
+ from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails
+ from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
from ._models_py3 import IntegrationRuntimeReference
from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters
from ._models_py3 import IntegrationRuntimeResource
@@ -371,6 +379,7 @@
from ._models_py3 import MagentoLinkedService
from ._models_py3 import MagentoObjectDataset
from ._models_py3 import MagentoSource
+ from ._models_py3 import ManagedIdentityCredential
from ._models_py3 import ManagedIntegrationRuntime
from ._models_py3 import ManagedIntegrationRuntimeError
from ._models_py3 import ManagedIntegrationRuntimeNode
@@ -390,12 +399,14 @@
from ._models_py3 import MarketoLinkedService
from ._models_py3 import MarketoObjectDataset
from ._models_py3 import MarketoSource
+ from ._models_py3 import MetadataItem
from ._models_py3 import MicrosoftAccessLinkedService
from ._models_py3 import MicrosoftAccessSink
from ._models_py3 import MicrosoftAccessSource
from ._models_py3 import MicrosoftAccessTableDataset
from ._models_py3 import MongoDbAtlasCollectionDataset
from ._models_py3 import MongoDbAtlasLinkedService
+ from ._models_py3 import MongoDbAtlasSink
from ._models_py3 import MongoDbAtlasSource
from ._models_py3 import MongoDbCollectionDataset
from ._models_py3 import MongoDbCursorMethodsProperties
@@ -403,6 +414,7 @@
from ._models_py3 import MongoDbSource
from ._models_py3 import MongoDbV2CollectionDataset
from ._models_py3 import MongoDbV2LinkedService
+ from ._models_py3 import MongoDbV2Sink
from ._models_py3 import MongoDbV2Source
from ._models_py3 import MultiplePipelineTrigger
from ._models_py3 import MySqlLinkedService
@@ -551,6 +563,7 @@
from ._models_py3 import ServiceNowLinkedService
from ._models_py3 import ServiceNowObjectDataset
from ._models_py3 import ServiceNowSource
+ from ._models_py3 import ServicePrincipalCredential
from ._models_py3 import SetVariableActivity
from ._models_py3 import SftpLocation
from ._models_py3 import SftpReadSettings
@@ -575,6 +588,7 @@
from ._models_py3 import SqlAlwaysEncryptedProperties
from ._models_py3 import SqlDwSink
from ._models_py3 import SqlDwSource
+ from ._models_py3 import SqlDwUpsertSettings
from ._models_py3 import SqlMiSink
from ._models_py3 import SqlMiSource
from ._models_py3 import SqlPartitionSettings
@@ -585,6 +599,7 @@
from ._models_py3 import SqlServerTableDataset
from ._models_py3 import SqlSink
from ._models_py3 import SqlSource
+ from ._models_py3 import SqlUpsertSettings
from ._models_py3 import SquareLinkedService
from ._models_py3 import SquareObjectDataset
from ._models_py3 import SquareSource
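The SqlUpsertSettings and SqlDwUpsertSettings imports above (together with the SqlWriteBehaviorEnum and SqlDwWriteBehaviorEnum enums added further down) enable upsert on SQL copy sinks. A hedged sketch of a sink built from these models, using the public azure-mgmt-datafactory names:

```python
# Hedged sketch: a SQL copy sink configured for upsert.
from azure.mgmt.datafactory.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior="upsert",  # SqlWriteBehaviorEnum: "insert" or "upsert"
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,      # stage incoming rows in a temp table first
        keys=["CustomerId"],   # key columns used to match existing rows
    ),
)
```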
@@ -822,6 +837,9 @@
from ._models import CreateDataFlowDebugSessionResponse # type: ignore
from ._models import CreateLinkedIntegrationRuntimeRequest # type: ignore
from ._models import CreateRunResponse # type: ignore
+ from ._models import Credential # type: ignore
+ from ._models import CredentialReference # type: ignore
+ from ._models import CredentialResource # type: ignore
from ._models import CustomActivity # type: ignore
from ._models import CustomActivityReferenceObject # type: ignore
from ._models import CustomDataSourceLinkedService # type: ignore
@@ -942,6 +960,7 @@
from ._models import GetSsisObjectMetadataRequest # type: ignore
from ._models import GitHubAccessTokenRequest # type: ignore
from ._models import GitHubAccessTokenResponse # type: ignore
+ from ._models import GitHubClientSecret # type: ignore
from ._models import GlobalParameterSpecification # type: ignore
from ._models import GoogleAdWordsLinkedService # type: ignore
from ._models import GoogleAdWordsObjectDataset # type: ignore
@@ -1001,6 +1020,10 @@
from ._models import IntegrationRuntimeMonitoringData # type: ignore
from ._models import IntegrationRuntimeNodeIpAddress # type: ignore
from ._models import IntegrationRuntimeNodeMonitoringData # type: ignore
+ from ._models import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint # type: ignore
+ from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpoint # type: ignore
+ from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails # type: ignore
+ from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse # type: ignore
from ._models import IntegrationRuntimeReference # type: ignore
from ._models import IntegrationRuntimeRegenerateKeyParameters # type: ignore
from ._models import IntegrationRuntimeResource # type: ignore
@@ -1036,6 +1059,7 @@
from ._models import MagentoLinkedService # type: ignore
from ._models import MagentoObjectDataset # type: ignore
from ._models import MagentoSource # type: ignore
+ from ._models import ManagedIdentityCredential # type: ignore
from ._models import ManagedIntegrationRuntime # type: ignore
from ._models import ManagedIntegrationRuntimeError # type: ignore
from ._models import ManagedIntegrationRuntimeNode # type: ignore
@@ -1055,12 +1079,14 @@
from ._models import MarketoLinkedService # type: ignore
from ._models import MarketoObjectDataset # type: ignore
from ._models import MarketoSource # type: ignore
+ from ._models import MetadataItem # type: ignore
from ._models import MicrosoftAccessLinkedService # type: ignore
from ._models import MicrosoftAccessSink # type: ignore
from ._models import MicrosoftAccessSource # type: ignore
from ._models import MicrosoftAccessTableDataset # type: ignore
from ._models import MongoDbAtlasCollectionDataset # type: ignore
from ._models import MongoDbAtlasLinkedService # type: ignore
+ from ._models import MongoDbAtlasSink # type: ignore
from ._models import MongoDbAtlasSource # type: ignore
from ._models import MongoDbCollectionDataset # type: ignore
from ._models import MongoDbCursorMethodsProperties # type: ignore
@@ -1068,6 +1094,7 @@
from ._models import MongoDbSource # type: ignore
from ._models import MongoDbV2CollectionDataset # type: ignore
from ._models import MongoDbV2LinkedService # type: ignore
+ from ._models import MongoDbV2Sink # type: ignore
from ._models import MongoDbV2Source # type: ignore
from ._models import MultiplePipelineTrigger # type: ignore
from ._models import MySqlLinkedService # type: ignore
@@ -1216,6 +1243,7 @@
from ._models import ServiceNowLinkedService # type: ignore
from ._models import ServiceNowObjectDataset # type: ignore
from ._models import ServiceNowSource # type: ignore
+ from ._models import ServicePrincipalCredential # type: ignore
from ._models import SetVariableActivity # type: ignore
from ._models import SftpLocation # type: ignore
from ._models import SftpReadSettings # type: ignore
@@ -1240,6 +1268,7 @@
from ._models import SqlAlwaysEncryptedProperties # type: ignore
from ._models import SqlDwSink # type: ignore
from ._models import SqlDwSource # type: ignore
+ from ._models import SqlDwUpsertSettings # type: ignore
from ._models import SqlMiSink # type: ignore
from ._models import SqlMiSource # type: ignore
from ._models import SqlPartitionSettings # type: ignore
@@ -1250,6 +1279,7 @@
from ._models import SqlServerTableDataset # type: ignore
from ._models import SqlSink # type: ignore
from ._models import SqlSource # type: ignore
+ from ._models import SqlUpsertSettings # type: ignore
from ._models import SquareLinkedService # type: ignore
from ._models import SquareObjectDataset # type: ignore
from ._models import SquareSource # type: ignore
@@ -1356,7 +1386,6 @@
DependencyCondition,
DynamicsAuthenticationType,
DynamicsDeploymentType,
- DynamicsServicePrincipalCredentialType,
DynamicsSinkWriteBehavior,
EventSubscriptionStatus,
FactoryIdentityType,
@@ -1410,12 +1439,15 @@
SapTablePartitionOption,
SelfHostedIntegrationRuntimeNodeStatus,
ServiceNowAuthenticationType,
+ ServicePrincipalCredentialType,
SftpAuthenticationType,
SparkAuthenticationType,
SparkServerType,
SparkThriftTransportProtocol,
SqlAlwaysEncryptedAkvAuthType,
+ SqlDwWriteBehaviorEnum,
SqlPartitionOption,
+ SqlWriteBehaviorEnum,
SsisLogLocationType,
SsisObjectMetadataType,
SsisPackageLocationType,
@@ -1583,6 +1615,9 @@
'CreateDataFlowDebugSessionResponse',
'CreateLinkedIntegrationRuntimeRequest',
'CreateRunResponse',
+ 'Credential',
+ 'CredentialReference',
+ 'CredentialResource',
'CustomActivity',
'CustomActivityReferenceObject',
'CustomDataSourceLinkedService',
@@ -1703,6 +1738,7 @@
'GetSsisObjectMetadataRequest',
'GitHubAccessTokenRequest',
'GitHubAccessTokenResponse',
+ 'GitHubClientSecret',
'GlobalParameterSpecification',
'GoogleAdWordsLinkedService',
'GoogleAdWordsObjectDataset',
@@ -1762,6 +1798,10 @@
'IntegrationRuntimeMonitoringData',
'IntegrationRuntimeNodeIpAddress',
'IntegrationRuntimeNodeMonitoringData',
+ 'IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint',
+ 'IntegrationRuntimeOutboundNetworkDependenciesEndpoint',
+ 'IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails',
+ 'IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse',
'IntegrationRuntimeReference',
'IntegrationRuntimeRegenerateKeyParameters',
'IntegrationRuntimeResource',
@@ -1797,6 +1837,7 @@
'MagentoLinkedService',
'MagentoObjectDataset',
'MagentoSource',
+ 'ManagedIdentityCredential',
'ManagedIntegrationRuntime',
'ManagedIntegrationRuntimeError',
'ManagedIntegrationRuntimeNode',
@@ -1816,12 +1857,14 @@
'MarketoLinkedService',
'MarketoObjectDataset',
'MarketoSource',
+ 'MetadataItem',
'MicrosoftAccessLinkedService',
'MicrosoftAccessSink',
'MicrosoftAccessSource',
'MicrosoftAccessTableDataset',
'MongoDbAtlasCollectionDataset',
'MongoDbAtlasLinkedService',
+ 'MongoDbAtlasSink',
'MongoDbAtlasSource',
'MongoDbCollectionDataset',
'MongoDbCursorMethodsProperties',
@@ -1829,6 +1872,7 @@
'MongoDbSource',
'MongoDbV2CollectionDataset',
'MongoDbV2LinkedService',
+ 'MongoDbV2Sink',
'MongoDbV2Source',
'MultiplePipelineTrigger',
'MySqlLinkedService',
@@ -1977,6 +2021,7 @@
'ServiceNowLinkedService',
'ServiceNowObjectDataset',
'ServiceNowSource',
+ 'ServicePrincipalCredential',
'SetVariableActivity',
'SftpLocation',
'SftpReadSettings',
@@ -2001,6 +2046,7 @@
'SqlAlwaysEncryptedProperties',
'SqlDwSink',
'SqlDwSource',
+ 'SqlDwUpsertSettings',
'SqlMiSink',
'SqlMiSource',
'SqlPartitionSettings',
@@ -2011,6 +2057,7 @@
'SqlServerTableDataset',
'SqlSink',
'SqlSource',
+ 'SqlUpsertSettings',
'SquareLinkedService',
'SquareObjectDataset',
'SquareSource',
@@ -2115,7 +2162,6 @@
'DependencyCondition',
'DynamicsAuthenticationType',
'DynamicsDeploymentType',
- 'DynamicsServicePrincipalCredentialType',
'DynamicsSinkWriteBehavior',
'EventSubscriptionStatus',
'FactoryIdentityType',
@@ -2169,12 +2215,15 @@
'SapTablePartitionOption',
'SelfHostedIntegrationRuntimeNodeStatus',
'ServiceNowAuthenticationType',
+ 'ServicePrincipalCredentialType',
'SftpAuthenticationType',
'SparkAuthenticationType',
'SparkServerType',
'SparkThriftTransportProtocol',
'SqlAlwaysEncryptedAkvAuthType',
+ 'SqlDwWriteBehaviorEnum',
'SqlPartitionOption',
+ 'SqlWriteBehaviorEnum',
'SsisLogLocationType',
'SsisObjectMetadataType',
'SsisPackageLocationType',
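The export list above now surfaces the credential model family (Credential, CredentialReference, CredentialResource, ManagedIdentityCredential, ServicePrincipalCredential). A minimal sketch of how the pieces compose; the keyword arguments (resource_id, properties, reference_name) are inferred from the model names and the serialization keys used later in this diff, so treat them as assumptions:

# Sketch only: wrap a user-assigned managed identity in a factory-level
# credential resource, then refer to it by name from a linked service.
from azext_datafactory.vendored_sdks.datafactory import models

identity = models.ManagedIdentityCredential(
    resource_id='/subscriptions/.../userAssignedIdentities/my-mi',  # assumed field
)
resource = models.CredentialResource(properties=identity)  # 'properties' assumed
ref = models.CredentialReference(reference_name='my-credential',
                                 type='CredentialReference')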
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
index 1e1c0d92c7d..4d250610be9 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
@@ -77,14 +77,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe
LOCAL_SERIAL = "LOCAL_SERIAL"
class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """All available compressionCodec values.
+ """
NONE = "none"
- GZIP = "gzip"
- SNAPPY = "snappy"
LZO = "lzo"
BZIP2 = "bzip2"
+ GZIP = "gzip"
DEFLATE = "deflate"
ZIP_DEFLATE = "zipDeflate"
+ SNAPPY = "snappy"
LZ4 = "lz4"
TAR = "tar"
TAR_G_ZIP = "tarGZip"
@@ -174,9 +176,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
COMPLETED = "Completed"
class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd'
- for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in
- online scenario. Type: string (or Expression with resultType string).
+ """All available dynamicsAuthenticationType values.
"""
OFFICE365 = "Office365"
@@ -184,23 +184,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E
AAD_SERVICE_PRINCIPAL = "AADServicePrincipal"
class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and
- 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with
- resultType string).
+ """All available dynamicsDeploymentType values.
"""
ONLINE = "Online"
ON_PREMISES_WITH_IFD = "OnPremisesWithIfd"
-class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The service principal credential type to use in Server-To-Server authentication.
- 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or
- Expression with resultType string).
- """
-
- SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey"
- SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert"
-
class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Defines values for DynamicsSinkWriteBehavior.
"""
@@ -267,7 +256,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum
BASIC = "Basic"
class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The node types on which the script action should be executed.
+ """All available HdiNodeTypes values.
"""
HEADNODE = "Headnode"
@@ -417,8 +406,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum))
ARRAY_OF_OBJECTS = "arrayOfObjects"
class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """File pattern of JSON. This setting controls the way a collection of JSON objects will be
- treated. The default value is 'setOfObjects'. It is case-sensitive.
+ """All available filePatterns.
"""
SET_OF_OBJECTS = "setOfObjects"
@@ -661,6 +649,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str,
BASIC = "Basic"
O_AUTH2 = "OAuth2"
+class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """All available servicePrincipalCredentialType values.
+ """
+
+ SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey"
+ SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert"
+
class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The authentication type to be used to connect to the FTP server.
"""
@@ -702,6 +697,13 @@ class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str
SERVICE_PRINCIPAL = "ServicePrincipal"
MANAGED_IDENTITY = "ManagedIdentity"
+class SqlDwWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Specify the write behavior when copying data into sql dw.
+ """
+
+ INSERT = "Insert"
+ UPSERT = "Upsert"
+
class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The partition mechanism that will be used for Sql read in parallel.
"""
@@ -710,6 +712,14 @@ class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable"
DYNAMIC_RANGE = "DynamicRange"
+class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Specify the write behavior when copying data into sql.
+ """
+
+ INSERT = "Insert"
+ UPSERT = "Upsert"
+ STORED_PROCEDURE = "StoredProcedure"
+
class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of SSIS log location.
"""
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
index e97fd0ab305..908eb2ef90c 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
@@ -634,6 +634,9 @@ class CopySource(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -646,6 +649,7 @@ class CopySource(msrest.serialization.Model):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -662,6 +666,7 @@ def __init__(
self.source_retry_count = kwargs.get('source_retry_count', None)
self.source_retry_wait = kwargs.get('source_retry_wait', None)
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
class TabularSource(CopySource):
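Every source, sink, and store-settings model in this file gains the same optional disable_metrics_collection flag, threaded through **kwargs exactly as above. A minimal usage sketch (the concrete source type is arbitrary; AvroSource is defined later in this file):

# Sketch: opt a copy source out of data store metrics collection.
from azext_datafactory.vendored_sdks.datafactory import models

source = models.AvroSource(
    max_concurrent_connections=4,
    disable_metrics_collection=True,  # new flag; the service default is false
)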
@@ -686,12 +691,15 @@ class TabularSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -704,8 +712,9 @@ class TabularSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
_subtype_map = {
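additional_columns is loosened from a typed list to a bare object so that either an inline list or a Data Factory expression passes through serialization unchanged. A sketch of both shapes; the dict keys mirror the AdditionalColumns model this replaces and are assumptions:

# Sketch: additional_columns now serializes whatever object it is given.
from azext_datafactory.vendored_sdks.datafactory import models

inline = [{'name': 'source_file', 'value': "@item().name"}]  # assumed keys
dynamic = {'type': 'Expression',
           'value': "@pipeline().parameters.extraColumns"}

src = models.AmazonMwsSource(additional_columns=inline)  # or dynamic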
@@ -741,12 +750,15 @@ class AmazonMwsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -762,8 +774,9 @@ class AmazonMwsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -868,12 +881,15 @@ class AmazonRedshiftSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when
@@ -892,8 +908,9 @@ class AmazonRedshiftSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'},
}
@@ -1156,6 +1173,9 @@ class StoreReadSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1166,6 +1186,7 @@ class StoreReadSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -1180,6 +1201,7 @@ def __init__(
self.additional_properties = kwargs.get('additional_properties', None)
self.type = 'StoreReadSettings' # type: str
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
class AmazonS3CompatibleReadSettings(StoreReadSettings):
@@ -1195,6 +1217,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1235,6 +1260,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1490,6 +1516,9 @@ class AmazonS3ReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1530,6 +1559,7 @@ class AmazonS3ReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1664,10 +1694,9 @@ class AvroDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the avro storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz",
- "bzip2".
- :type avro_compression_codec: str or
- ~data_factory_management_client.models.AvroCompressionCodec
+ :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with
+ resultType string).
+ :type avro_compression_codec: object
:param avro_compression_level:
:type avro_compression_level: int
"""
@@ -1689,7 +1718,7 @@ class AvroDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
+ 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'},
'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
}
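avro_compression_codec undergoes the same loosening, from the AvroCompressionCodec enum to a plain object, so a literal string and a runtime expression are both accepted. A short sketch (the linked service reference is a placeholder):

# Sketch: codec as a literal string or as an expression object.
from azext_datafactory.vendored_sdks.datafactory import models

ds = models.AvroDataset(
    linked_service_name=models.LinkedServiceReference(reference_name='ls_blob'),
    avro_compression_codec='snappy',  # or {'type': 'Expression', 'value': '...'}
)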
@@ -1788,7 +1817,7 @@ class CopySink(msrest.serialization.Model):
"""A copy activity sink.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
+ sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
All required parameters must be populated in order to send to Azure.
@@ -1812,6 +1841,9 @@ class CopySink(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1826,10 +1858,11 @@ class CopySink(msrest.serialization.Model):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
- 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
+ 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
}
def __init__(
@@ -1844,6 +1877,7 @@ def __init__(
self.sink_retry_count = kwargs.get('sink_retry_count', None)
self.sink_retry_wait = kwargs.get('sink_retry_wait', None)
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
class AvroSink(CopySink):
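With MongoDbAtlasSink and MongoDbV2Sink registered in the discriminator map above, msrest can now resolve those payloads to concrete classes instead of falling back to the base CopySink. A sketch driving msrest's Deserializer directly; the writeBehavior property on the new sinks is an assumption:

# Sketch: the 'type' discriminator selects the concrete sink class.
from msrest import Deserializer
from azext_datafactory.vendored_sdks.datafactory import models

client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

sink = deserialize('CopySink', {'type': 'MongoDbAtlasSink',
                                'writeBehavior': 'upsert'})  # assumed property
print(type(sink).__name__)  # -> MongoDbAtlasSink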
@@ -1871,6 +1905,9 @@ class AvroSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Avro format settings.
@@ -1889,6 +1926,7 @@ class AvroSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
}
@@ -1922,11 +1960,14 @@ class AvroSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -1939,8 +1980,9 @@ class AvroSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -2135,6 +2177,8 @@ class AzureBatchLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2158,6 +2202,7 @@ class AzureBatchLinkedService(LinkedService):
'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'},
'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2172,6 +2217,7 @@ def __init__(
self.pool_name = kwargs['pool_name']
self.linked_service_name = kwargs['linked_service_name']
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobDataset(Dataset):
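The same optional credential hook recurs on the Blob FS, Blob Storage, Databricks, Data Explorer, Data Lake Store, and Function linked services below, and attaching one is uniform across them. A sketch against AzureBatchLinkedService, with placeholder values for its required parameters:

# Sketch: point a linked service at a factory-level credential by name.
from azext_datafactory.vendored_sdks.datafactory import models

ls = models.AzureBatchLinkedService(
    account_name='batchacct',  # placeholders for the required params
    batch_uri='https://batchacct.westus.batch.azure.com',
    pool_name='pool1',
    linked_service_name=models.LinkedServiceReference(reference_name='ls_storage'),
    credential=models.CredentialReference(reference_name='my-credential',
                                          type='CredentialReference'),
)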
@@ -2374,6 +2420,8 @@ class AzureBlobFsLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2395,6 +2443,7 @@ class AzureBlobFsLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2410,6 +2459,7 @@ def __init__(
self.tenant = kwargs.get('tenant', None)
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobFsLocation(DatasetLocation):
@@ -2467,6 +2517,9 @@ class AzureBlobFsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2504,6 +2557,7 @@ class AzureBlobFsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2557,8 +2611,14 @@ class AzureBlobFsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -2573,7 +2633,9 @@ class AzureBlobFsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -2583,6 +2645,7 @@ def __init__(
super(AzureBlobFsSink, self).__init__(**kwargs)
self.type = 'AzureBlobFSSink' # type: str
self.copy_behavior = kwargs.get('copy_behavior', None)
+ self.metadata = kwargs.get('metadata', None)
class AzureBlobFsSource(CopySource):
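AzureBlobFsSink additionally gains a metadata list of the new MetadataItem model. The field names below (name, value) are assumptions about that model's shape:

# Sketch: stamp custom metadata onto files written by the sink.
from azext_datafactory.vendored_sdks.datafactory import models

sink = models.AzureBlobFsSink(
    copy_behavior='PreserveHierarchy',
    metadata=[models.MetadataItem(name='ingested_by', value='adf')],  # assumed fields
)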
@@ -2604,6 +2667,9 @@ class AzureBlobFsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -2625,6 +2691,7 @@ class AzureBlobFsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -2657,6 +2724,9 @@ class StoreWriteSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -2669,6 +2739,7 @@ class StoreWriteSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -2684,6 +2755,7 @@ def __init__(
self.additional_properties = kwargs.get('additional_properties', None)
self.type = 'StoreWriteSettings' # type: str
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
self.copy_behavior = kwargs.get('copy_behavior', None)
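Write settings pick up the flag next to the existing copy_behavior; the Blob FS variant just below also carries block_size_in_mb. A brief sketch:

# Sketch: write settings with metrics collection disabled.
from azext_datafactory.vendored_sdks.datafactory import models

ws = models.AzureBlobFsWriteSettings(
    copy_behavior='FlattenHierarchy',
    block_size_in_mb=8,
    disable_metrics_collection=True,
)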
@@ -2700,6 +2772,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer
@@ -2715,6 +2790,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -2781,6 +2857,8 @@ class AzureBlobStorageLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: str
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2805,6 +2883,7 @@ class AzureBlobStorageLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2824,6 +2903,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.account_kind = kwargs.get('account_kind', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobStorageLocation(DatasetLocation):
@@ -2881,6 +2961,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2921,6 +3004,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2964,6 +3048,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer
@@ -2979,6 +3066,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3296,6 +3384,9 @@ class AzureDatabricksDeltaLakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -3316,6 +3407,7 @@ class AzureDatabricksDeltaLakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'},
}
@@ -3349,6 +3441,9 @@ class AzureDatabricksDeltaLakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with
resultType string).
:type query: object
@@ -3367,6 +3462,7 @@ class AzureDatabricksDeltaLakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'},
}
@@ -3464,6 +3560,8 @@ class AzureDatabricksLinkedService(LinkedService):
:param policy_id: The policy id for limiting the ability to configure clusters based on a user
defined set of rules. Type: string (or Expression with resultType string).
:type policy_id: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3496,6 +3594,7 @@ class AzureDatabricksLinkedService(LinkedService):
'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3522,6 +3621,7 @@ def __init__(
self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.policy_id = kwargs.get('policy_id', None)
+ self.credential = kwargs.get('credential', None)
class ExecutionActivity(Activity):
@@ -3674,6 +3774,8 @@ class AzureDataExplorerLinkedService(LinkedService):
:param tenant: The name or ID of the tenant to which the service principal belongs. Type:
string (or Expression with resultType string).
:type tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3694,6 +3796,7 @@ class AzureDataExplorerLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3707,6 +3810,7 @@ def __init__(
self.service_principal_key = kwargs.get('service_principal_key', None)
self.database = kwargs['database']
self.tenant = kwargs.get('tenant', None)
+ self.credential = kwargs.get('credential', None)
class AzureDataExplorerSink(CopySink):
@@ -3734,6 +3838,9 @@ class AzureDataExplorerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the
target Kusto table. Type: string.
:type ingestion_mapping_name: object
@@ -3757,6 +3864,7 @@ class AzureDataExplorerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
@@ -3792,6 +3900,9 @@ class AzureDataExplorerSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type:
string (or Expression with resultType string).
:type query: object
@@ -3802,8 +3913,8 @@ class AzureDataExplorerSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -3817,10 +3928,11 @@ class AzureDataExplorerSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'no_truncation': {'key': 'noTruncation', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -4098,6 +4210,8 @@ class AzureDataLakeStoreLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4121,6 +4235,7 @@ class AzureDataLakeStoreLinkedService(LinkedService):
'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4138,6 +4253,7 @@ def __init__(
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group_name = kwargs.get('resource_group_name', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureDataLakeStoreLocation(DatasetLocation):
@@ -4190,6 +4306,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4235,6 +4354,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4292,6 +4412,9 @@ class AzureDataLakeStoreSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param enable_adls_single_file_parallel: Single File Parallel.
@@ -4310,6 +4433,7 @@ class AzureDataLakeStoreSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
}
@@ -4343,6 +4467,9 @@ class AzureDataLakeStoreSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4358,6 +4485,7 @@ class AzureDataLakeStoreSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
}
@@ -4383,6 +4511,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param expiry_date_time: Specifies the expiry time of the written files. The time is applied to
@@ -4399,6 +4530,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'},
}
@@ -4550,6 +4682,9 @@ class AzureFileStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4590,6 +4725,7 @@ class AzureFileStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4633,6 +4769,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -4645,6 +4784,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -4754,6 +4894,13 @@ class AzureFunctionLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
+ :param resource_id: Allowed token audiences for Azure Function.
+ :type resource_id: object
+ :param authentication: Type of authentication (Required to specify MSI) used to connect to
+ AzureFunction. Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -4771,6 +4918,9 @@ class AzureFunctionLinkedService(LinkedService):
'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -4782,6 +4932,9 @@ def __init__(
self.function_app_url = kwargs['function_app_url']
self.function_key = kwargs.get('function_key', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.authentication = kwargs.get('authentication', None)
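+
+# Hedged usage sketch (not generated code) for the MSI-related type-properties
+# added above; the URL, audience, and credential name are illustrative only.
+#
+#   ls = AzureFunctionLinkedService(
+#       function_app_url='https://myapp.azurewebsites.net',
+#       authentication='MSI',
+#       resource_id='<allowed-token-audience>',
+#       credential=CredentialReference(reference_name='myUserAssignedCredential'),
+#   )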
class AzureKeyVaultLinkedService(LinkedService):
@@ -4805,6 +4958,8 @@ class AzureKeyVaultLinkedService(LinkedService):
:param base_url: Required. The base URL of the Azure Key Vault. e.g.
https://myakv.vault.azure.net Type: string (or Expression with resultType string).
:type base_url: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4820,6 +4975,7 @@ class AzureKeyVaultLinkedService(LinkedService):
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4829,6 +4985,7 @@ def __init__(
super(AzureKeyVaultLinkedService, self).__init__(**kwargs)
self.type = 'AzureKeyVault' # type: str
self.base_url = kwargs['base_url']
+ self.credential = kwargs.get('credential', None)
class SecretBase(msrest.serialization.Model):
@@ -4979,12 +5136,15 @@ class AzureMariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -5000,8 +5160,9 @@ class AzureMariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -5278,6 +5439,9 @@ class AzureMlLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param authentication: Type of authentication (Required to specify MSI) used to connect to
+ AzureML. Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -5300,6 +5464,7 @@ class AzureMlLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -5315,6 +5480,7 @@ def __init__(
self.service_principal_key = kwargs.get('service_principal_key', None)
self.tenant = kwargs.get('tenant', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.authentication = kwargs.get('authentication', None)
class AzureMlServiceLinkedService(LinkedService):
@@ -5580,6 +5746,9 @@ class AzureMySqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -5597,6 +5766,7 @@ class AzureMySqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -5628,12 +5798,15 @@ class AzureMySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -5648,8 +5821,9 @@ class AzureMySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -5807,6 +5981,9 @@ class AzurePostgreSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -5824,6 +6001,7 @@ class AzurePostgreSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -5855,12 +6033,15 @@ class AzurePostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -5876,8 +6057,9 @@ class AzurePostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -5984,6 +6166,9 @@ class AzureQueueSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -5998,6 +6183,7 @@ class AzureQueueSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
def __init__(
@@ -6093,6 +6279,9 @@ class AzureSearchIndexSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specify the write behavior when upserting documents into Azure Search
Index. Possible values include: "Merge", "Upload".
:type write_behavior: str or
@@ -6111,6 +6300,7 @@ class AzureSearchIndexSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
}
@@ -6223,6 +6413,8 @@ class AzureSqlDatabaseLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6245,6 +6437,7 @@ class AzureSqlDatabaseLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6261,6 +6454,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlDwLinkedService(LinkedService):
@@ -6303,6 +6497,8 @@ class AzureSqlDwLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6324,6 +6520,7 @@ class AzureSqlDwLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6339,6 +6536,7 @@ def __init__(
self.tenant = kwargs.get('tenant', None)
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlDwTableDataset(Dataset):
@@ -6453,6 +6651,8 @@ class AzureSqlMiLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6475,6 +6675,7 @@ class AzureSqlMiLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6491,6 +6692,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlMiTableDataset(Dataset):
@@ -6587,6 +6789,9 @@ class AzureSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -6605,6 +6810,14 @@ class AzureSqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -6619,12 +6832,16 @@ class AzureSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -6639,6 +6856,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
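+
+# Hedged sketch of the new sink options above. SqlUpsertSettings is the model
+# named in the docstring; its own fields are assumed optional here.
+#
+#   sink = AzureSqlSink(
+#       write_behavior='Upsert',
+#       upsert_settings=SqlUpsertSettings(),
+#       sql_writer_use_table_lock=False,
+#   )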
class AzureSqlSource(TabularSource):
@@ -6660,12 +6880,15 @@ class AzureSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -6695,8 +6918,9 @@ class AzureSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -6938,6 +7162,9 @@ class AzureTableSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param azure_table_default_partition_key_value: Azure Table default partition key value. Type:
string (or Expression with resultType string).
:type azure_table_default_partition_key_value: object
@@ -6964,6 +7191,7 @@ class AzureTableSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
@@ -7001,12 +7229,15 @@ class AzureTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param azure_table_source_query: Azure Table source query. Type: string (or Expression with
resultType string).
:type azure_table_source_query: object
@@ -7025,8 +7256,9 @@ class AzureTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
}
@@ -7263,6 +7495,9 @@ class BinarySink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
"""
@@ -7279,6 +7514,7 @@ class BinarySink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
}
@@ -7310,6 +7546,9 @@ class BinarySource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Binary format settings.
@@ -7326,6 +7565,7 @@ class BinarySource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'},
}
@@ -7543,6 +7783,9 @@ class BlobSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression
with resultType boolean).
:type blob_writer_overwrite_files: object
@@ -7554,6 +7797,9 @@ class BlobSink(CopySink):
:type blob_writer_add_header: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -7568,10 +7814,12 @@ class BlobSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -7584,6 +7832,7 @@ def __init__(
self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None)
self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None)
self.copy_behavior = kwargs.get('copy_behavior', None)
+ self.metadata = kwargs.get('metadata', None)
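+
+# Hedged sketch for the new 'metadata' property; MetadataItem is the model
+# named in the docstring, assumed here to carry name/value pairs.
+#
+#   sink = BlobSink(metadata=[MetadataItem(name='origin', value='adf')])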
class BlobSource(CopySource):
@@ -7605,6 +7854,9 @@ class BlobSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -7626,6 +7878,7 @@ class BlobSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -7794,12 +8047,15 @@ class CassandraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language
(CQL) command. Type: string (or Expression with resultType string).
:type query: object
@@ -7823,8 +8079,9 @@ class CassandraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'consistency_level': {'key': 'consistencyLevel', 'type': 'str'},
}
@@ -8140,8 +8397,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param deployment_type: Required. The deployment type of the Common Data Service for Apps
instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common
Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType
- string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Common Data Service for Apps server. The
property is required for on-prem and not allowed for online. Type: string (or Expression with
resultType string).
@@ -8162,10 +8419,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Common Data Service
for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario.
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Common Data Service for Apps instance. Type: string
(or Expression with resultType string).
:type username: object
@@ -8176,10 +8431,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -8205,16 +8458,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
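+
+# Hedged sketch: with the enum-to-object change above, these properties accept
+# either plain strings or Data Factory expression objects.
+#
+#   ls = CommonDataServiceForAppsLinkedService(
+#       deployment_type='Online',
+#       authentication_type={'type': 'Expression',
+#                            'value': "@pipeline().parameters.authType"},
+#   )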
@@ -8264,6 +8517,9 @@ class CommonDataServiceForAppsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -8289,6 +8545,7 @@ class CommonDataServiceForAppsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -8324,12 +8581,15 @@ class CommonDataServiceForAppsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Common Data
Service for Apps (online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -8342,8 +8602,9 @@ class CommonDataServiceForAppsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -8587,12 +8848,15 @@ class ConcurSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -8608,8 +8872,9 @@ class ConcurSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -9130,6 +9395,9 @@ class CosmosDbMongoDbApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specifies whether the document with the same key is overwritten (upsert)
rather than throwing an exception (insert). The default value is "insert". Type: string (or
Expression with resultType string).
@@ -9148,6 +9416,7 @@ class CosmosDbMongoDbApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -9179,6 +9448,9 @@ class CosmosDbMongoDbApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -9194,8 +9466,8 @@ class CosmosDbMongoDbApiSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -9208,11 +9480,12 @@ class CosmosDbMongoDbApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -9313,6 +9586,9 @@ class CosmosDbSqlApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or
Expression with resultType string). Allowed values: insert and upsert.
:type write_behavior: object
@@ -9330,6 +9606,7 @@ class CosmosDbSqlApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -9361,6 +9638,9 @@ class CosmosDbSqlApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: SQL API query. Type: string (or Expression with resultType string).
:type query: object
:param page_size: Page size of the result. Type: integer (or Expression with resultType
@@ -9373,8 +9653,8 @@ class CosmosDbSqlApiSource(CopySource):
Expression with resultType boolean).
:type detect_datetime: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -9387,11 +9667,12 @@ class CosmosDbSqlApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'page_size': {'key': 'pageSize', 'type': 'object'},
'preferred_regions': {'key': 'preferredRegions', 'type': 'object'},
'detect_datetime': {'key': 'detectDatetime', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -9482,12 +9763,15 @@ class CouchbaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -9503,8 +9787,9 @@ class CouchbaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -9691,6 +9976,172 @@ def __init__(
self.run_id = kwargs['run_id']
+class Credential(msrest.serialization.Model):
+ """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ }
+
+ _subtype_map = {
+ 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Credential, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = 'Credential' # type: str
+ self.description = kwargs.get('description', None)
+ self.annotations = kwargs.get('annotations', None)
+
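+# Hedged sketch: the 'type' discriminator in _subtype_map selects the concrete
+# model during (de)serialization, so callers instantiate a named sub-class
+# rather than Credential itself.
+#
+#   cred = ManagedIdentityCredential(description='factory managed identity')
+#   assert cred.type == 'ManagedIdentity'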
+
+class CredentialReference(msrest.serialization.Model):
+ """Credential reference type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :ivar type: Required. Credential reference type. Default value: "CredentialReference".
+ :vartype type: str
+ :param reference_name: Required. Reference credential name.
+ :type reference_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True, 'constant': True},
+ 'reference_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_name': {'key': 'referenceName', 'type': 'str'},
+ }
+
+ type = "CredentialReference"
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CredentialReference, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.reference_name = kwargs['reference_name']
+
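+# Hedged usage sketch: linked services gaining a 'credential' type-property in
+# this change (e.g. AzureKeyVaultLinkedService above) reference a factory
+# credential by name; 'type' is a class-level constant and is not passed.
+#
+#   ref = CredentialReference(reference_name='myUserAssignedCredential')
+#   ls = AzureKeyVaultLinkedService(
+#       base_url='https://myakv.vault.azure.net', credential=ref)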
+
+class SubResource(msrest.serialization.Model):
+ """Azure Data Factory nested resource, which belongs to a factory.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SubResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.etag = None
+
+
+class CredentialResource(SubResource):
+ """Credential resource type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Required. Properties of credentials.
+ :type properties: ~data_factory_management_client.models.Credential
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'Credential'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CredentialResource, self).__init__(**kwargs)
+ self.properties = kwargs['properties']
+
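+# Hedged sketch: a CredentialResource wraps exactly one concrete Credential in
+# its required 'properties'; id/name/type/etag are populated by the server.
+#
+#   resource = CredentialResource(properties=ManagedIdentityCredential())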
+
class CustomActivity(ExecutionActivity):
"""Custom activity type.
@@ -10546,46 +10997,6 @@ def __init__(
self.dataset_parameters = kwargs.get('dataset_parameters', None)
-class SubResource(msrest.serialization.Model):
- """Azure Data Factory nested resource, which belongs to a factory.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(SubResource, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
- self.etag = None
-
-
class DataFlowResource(SubResource):
"""Data flow resource type.
@@ -10883,8 +11294,9 @@ class DatasetCompression(msrest.serialization.Model):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -10893,7 +11305,7 @@ class DatasetCompression(msrest.serialization.Model):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
_subtype_map = {
@@ -10917,8 +11329,9 @@ class DatasetBZip2Compression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -10927,7 +11340,7 @@ class DatasetBZip2Compression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
def __init__(
@@ -10997,10 +11410,11 @@ class DatasetDeflateCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The Deflate compression level.
+ :type level: object
"""
_validation = {
@@ -11009,8 +11423,8 @@ class DatasetDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11049,10 +11463,11 @@ class DatasetGZipCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The GZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The GZip compression level.
+ :type level: object
"""
_validation = {
@@ -11061,8 +11476,8 @@ class DatasetGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11219,8 +11634,9 @@ class DatasetTarCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -11229,7 +11645,7 @@ class DatasetTarCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
def __init__(
@@ -11248,10 +11664,11 @@ class DatasetTarGZipCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The TarGZip compression level.
+ :type level: object
"""
_validation = {
@@ -11260,8 +11677,8 @@ class DatasetTarGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11281,10 +11698,11 @@ class DatasetZipDeflateCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The ZipDeflate compression level.
+ :type level: object
"""
_validation = {
@@ -11293,8 +11711,8 @@ class DatasetZipDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
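The net effect of widening 'type' and 'level' from str to object across the DatasetCompression family is that Data Factory expressions become acceptable alongside plain literals. A sketch under that assumption, using the standard ADF expression shape:

    from data_factory_management_client.models import DatasetGZipCompression

    # A literal level still works as before.
    gzip_literal = DatasetGZipCompression(level='Optimal')

    # With 'level' typed as object, an expression can now be passed too.
    gzip_expr = DatasetGZipCompression(
        level={'value': "@pipeline().parameters.compressionLevel", 'type': 'Expression'}
    )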
@@ -11413,12 +11831,15 @@ class Db2Source(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -11433,8 +11854,9 @@ class Db2Source(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -11652,12 +12074,11 @@ class DelimitedTextDataset(Dataset):
https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
resultType string).
:type encoding_name: object
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
- :param compression_level: The data compression method used for DelimitedText. Possible values
- include: "Optimal", "Fastest".
- :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param compression_codec: The data compression codec. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
+ :param compression_level: The data compression method used for DelimitedText.
+ :type compression_level: object
:param quote_char: The quote character. Type: string (or Expression with resultType string).
:type quote_char: object
:param escape_char: The escape character. Type: string (or Expression with resultType string).
@@ -11689,8 +12110,8 @@ class DelimitedTextDataset(Dataset):
'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
- 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
@@ -11778,6 +12199,9 @@ class DelimitedTextSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: DelimitedText format settings.
@@ -11796,6 +12220,7 @@ class DelimitedTextSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
}
@@ -11829,13 +12254,16 @@ class DelimitedTextSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: DelimitedText format settings.
:type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -11848,9 +12276,10 @@ class DelimitedTextSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
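A sketch of the two source-side changes that repeat throughout this file: the new disableMetricsCollection flag and additionalColumns loosened from a typed list to a plain object. Column names and values here are hypothetical.

    from data_factory_management_client.models import DelimitedTextSource

    source = DelimitedTextSource(
        # New kwarg: opt out of data store metrics collection.
        disable_metrics_collection=True,
        # Now a plain object, so a raw list of name/value mappings
        # (or an Expression dict) can be supplied directly.
        additional_columns=[{'name': 'ingestedAt', 'value': '@utcnow()'}],
    )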
@@ -12070,6 +12499,9 @@ class DocumentDbCollectionSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or
Expression with resultType string).
:type nesting_separator: object
@@ -12090,6 +12522,7 @@ class DocumentDbCollectionSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -12123,6 +12556,9 @@ class DocumentDbCollectionSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Documents query. Type: string (or Expression with resultType string).
:type query: object
:param nesting_separator: Nested properties separator. Type: string (or Expression with
@@ -12132,8 +12568,8 @@ class DocumentDbCollectionSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -12146,10 +12582,11 @@ class DocumentDbCollectionSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -12239,12 +12676,15 @@ class DrillSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -12260,8 +12700,9 @@ class DrillSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -12552,12 +12993,15 @@ class DynamicsAxSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -12578,8 +13022,9 @@ class DynamicsAxSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -12673,9 +13118,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online'
for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type:
- string (or Expression with resultType string). Possible values include: "Online",
- "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string (or Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics CRM server. The property is
required for on-prem and not allowed for online. Type: string (or Expression with resultType
string).
@@ -12694,10 +13138,8 @@ class DynamicsCrmLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics CRM
server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario,
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics CRM instance. Type: string (or Expression
with resultType string).
:type username: object
@@ -12708,10 +13150,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -12737,16 +13177,16 @@ class DynamicsCrmLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
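With deploymentType and authenticationType widened to object, both plain strings and expression dicts pass through. A minimal sketch; the service URI is a hypothetical example:

    from data_factory_management_client.models import DynamicsCrmLinkedService

    linked_service = DynamicsCrmLinkedService(
        deployment_type='Online',           # a literal, or an Expression dict
        authentication_type='Office365',
        service_uri='https://contoso.crm.dynamics.com',
    )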
@@ -12796,6 +13236,9 @@ class DynamicsCrmSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -12821,6 +13264,7 @@ class DynamicsCrmSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -12856,12 +13300,15 @@ class DynamicsCrmSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -12874,8 +13321,9 @@ class DynamicsCrmSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -12967,8 +13415,8 @@ class DynamicsLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for
Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or
- Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics server. The property is required
for on-prem and not allowed for online. Type: string (or Expression with resultType string).
:type host_name: object
@@ -12986,9 +13434,8 @@ class DynamicsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics server.
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal'
for Server-To-Server authentication in online scenario. Type: string (or Expression with
- resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics instance. Type: string (or Expression with
resultType string).
:type username: object
@@ -12999,10 +13446,8 @@ class DynamicsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -13028,12 +13473,12 @@ class DynamicsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
@@ -13087,6 +13532,9 @@ class DynamicsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -13112,6 +13560,7 @@ class DynamicsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -13147,12 +13596,15 @@ class DynamicsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -13165,8 +13617,9 @@ class DynamicsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -13334,12 +13787,15 @@ class EloquaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -13355,8 +13811,9 @@ class EloquaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -13498,9 +13955,12 @@ class ExcelDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the excel storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType
+ :param sheet_name: The sheet name of the excel file. Type: string (or Expression with resultType
string).
:type sheet_name: object
+ :param sheet_index: The sheet index of the excel file; the default value is 0. Type: integer (or
+ Expression with resultType integer).
+ :type sheet_index: object
:param range: The partial data of one sheet. Type: string (or Expression with resultType
string).
:type range: object
@@ -13531,6 +13991,7 @@ class ExcelDataset(Dataset):
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'},
+ 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'},
'range': {'key': 'typeProperties.range', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
@@ -13545,6 +14006,7 @@ def __init__(
self.type = 'Excel' # type: str
self.location = kwargs.get('location', None)
self.sheet_name = kwargs.get('sheet_name', None)
+ self.sheet_index = kwargs.get('sheet_index', None)
self.range = kwargs.get('range', None)
self.first_row_as_header = kwargs.get('first_row_as_header', None)
self.compression = kwargs.get('compression', None)
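A sketch contrasting the existing sheetName property with the new sheetIndex; the linked service reference is a hypothetical example, and LinkedServiceReference is assumed to be available from the same models module.

    from data_factory_management_client.models import ExcelDataset, LinkedServiceReference

    ls_ref = LinkedServiceReference(
        type='LinkedServiceReference', reference_name='AzureBlobStorageLS'
    )

    # Address the sheet by name, as before...
    by_name = ExcelDataset(linked_service_name=ls_ref, sheet_name='Sales')

    # ...or by zero-based index; per the docstring the default is 0.
    by_index = ExcelDataset(linked_service_name=ls_ref, sheet_index=1)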
@@ -13570,11 +14032,14 @@ class ExcelSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Excel store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -13587,8 +14052,9 @@ class ExcelSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -14216,6 +14682,10 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration):
:type last_commit_id: str
:param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com.
:type host_name: str
+ :param client_id: GitHub bring your own app client id.
+ :type client_id: str
+ :param client_secret: GitHub bring your own app client secret information.
+ :type client_secret: ~data_factory_management_client.models.GitHubClientSecret
"""
_validation = {
@@ -14234,6 +14704,8 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration):
'root_folder': {'key': 'rootFolder', 'type': 'str'},
'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'GitHubClientSecret'},
}
def __init__(
@@ -14243,6 +14715,8 @@ def __init__(
super(FactoryGitHubConfiguration, self).__init__(**kwargs)
self.type = 'FactoryGitHubConfiguration' # type: str
self.host_name = kwargs.get('host_name', None)
+ self.client_id = kwargs.get('client_id', None)
+ self.client_secret = kwargs.get('client_secret', None)
class FactoryIdentity(msrest.serialization.Model):
@@ -14527,6 +15001,9 @@ class FileServerReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -14567,6 +15044,7 @@ class FileServerReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -14610,6 +15088,9 @@ class FileServerWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -14622,6 +15103,7 @@ class FileServerWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -14745,6 +15227,9 @@ class FileSystemSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -14761,6 +15246,7 @@ class FileSystemSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -14792,12 +15278,15 @@ class FileSystemSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -14810,8 +15299,9 @@ class FileSystemSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -14951,6 +15441,9 @@ class FtpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -14984,6 +15477,7 @@ class FtpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -15247,6 +15741,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
:type git_hub_access_code: str
:param git_hub_client_id: GitHub application client ID.
:type git_hub_client_id: str
+ :param git_hub_client_secret: GitHub bring your own app client secret information.
+ :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret
:param git_hub_access_token_base_url: Required. GitHub access token base URL.
:type git_hub_access_token_base_url: str
"""
@@ -15259,6 +15755,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
_attribute_map = {
'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
+ 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'},
'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
}
@@ -15269,6 +15766,7 @@ def __init__(
super(GitHubAccessTokenRequest, self).__init__(**kwargs)
self.git_hub_access_code = kwargs['git_hub_access_code']
self.git_hub_client_id = kwargs.get('git_hub_client_id', None)
+ self.git_hub_client_secret = kwargs.get('git_hub_client_secret', None)
self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url']
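A sketch of requesting a GitHub access token with the new bring-your-own-app secret, using the GitHubClientSecret model introduced just below; the OAuth code, client id and Key Vault values are placeholders.

    from data_factory_management_client.models import (
        GitHubAccessTokenRequest,
        GitHubClientSecret,
    )

    request = GitHubAccessTokenRequest(
        git_hub_access_code='<oauth-code>',                    # required
        git_hub_client_id='<byoa-app-client-id>',
        git_hub_client_secret=GitHubClientSecret(
            byoa_secret_akv_url='https://contoso-kv.vault.azure.net',
            byoa_secret_name='github-byoa-client-secret',
        ),
        git_hub_access_token_base_url='https://api.github.com',  # required
    )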
@@ -15291,6 +15789,29 @@ def __init__(
self.git_hub_access_token = kwargs.get('git_hub_access_token', None)
+class GitHubClientSecret(msrest.serialization.Model):
+ """Client secret information for factory's bring your own app repository configuration.
+
+ :param byoa_secret_akv_url: Bring your own app client secret AKV URL.
+ :type byoa_secret_akv_url: str
+ :param byoa_secret_name: Bring your own app client secret name in AKV.
+ :type byoa_secret_name: str
+ """
+
+ _attribute_map = {
+ 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'},
+ 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(GitHubClientSecret, self).__init__(**kwargs)
+ self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None)
+ self.byoa_secret_name = kwargs.get('byoa_secret_name', None)
+
+
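The same secret model also plugs into the factory's repo configuration through the new clientSecret property on FactoryGitHubConfiguration added earlier in this diff. A hedged sketch with placeholder repository settings:

    from data_factory_management_client.models import (
        FactoryGitHubConfiguration,
        GitHubClientSecret,
    )

    repo_config = FactoryGitHubConfiguration(
        account_name='contoso',
        repository_name='adf-pipelines',
        collaboration_branch='main',
        root_folder='/',
        client_id='<byoa-app-client-id>',
        client_secret=GitHubClientSecret(
            byoa_secret_akv_url='https://contoso-kv.vault.azure.net',
            byoa_secret_name='github-byoa-client-secret',
        ),
    )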
class GlobalParameterSpecification(msrest.serialization.Model):
"""Definition of a single parameter for an entity.
@@ -15502,12 +16023,15 @@ class GoogleAdWordsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -15523,8 +16047,9 @@ class GoogleAdWordsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -15731,12 +16256,15 @@ class GoogleBigQuerySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -15752,8 +16280,9 @@ class GoogleBigQuerySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -15890,6 +16419,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -15930,6 +16462,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16035,12 +16568,15 @@ class GreenplumSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -16056,8 +16592,9 @@ class GreenplumSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -16313,12 +16850,15 @@ class HBaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -16334,8 +16874,9 @@ class HBaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -16465,6 +17006,9 @@ class HdfsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16504,6 +17048,7 @@ class HdfsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16553,6 +17098,9 @@ class HdfsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16570,6 +17118,7 @@ class HdfsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
}
@@ -16952,6 +17501,8 @@ class HdInsightOnDemandLinkedService(LinkedService):
:param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was
specified, then this property is required. Type: string (or Expression with resultType string).
:type subnet_name: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -17005,6 +17556,7 @@ class HdInsightOnDemandLinkedService(LinkedService):
'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -17046,6 +17598,7 @@ def __init__(
self.script_actions = kwargs.get('script_actions', None)
self.virtual_network_id = kwargs.get('virtual_network_id', None)
self.subnet_name = kwargs.get('subnet_name', None)
+ self.credential = kwargs.get('credential', None)
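
The new `credential` property reuses the `CredentialReference` type introduced elsewhere in this change. A hedged sketch of attaching one to the on-demand HDInsight linked service, with the required properties reduced to illustrative placeholders (all reference names are hypothetical):

    from data_factory_management_client.models import (
        CredentialReference,
        HdInsightOnDemandLinkedService,
        LinkedServiceReference,
    )

    linked_service = HdInsightOnDemandLinkedService(
        cluster_size=4,
        time_to_live="00:15:00",
        version="3.6",
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="AzureStorageLS"
        ),
        host_subscription_id="00000000-0000-0000-0000-000000000000",
        tenant="contoso.onmicrosoft.com",
        cluster_resource_group="my-rg",
        # reference_name must match an existing factory credential resource.
        credential=CredentialReference(
            type="CredentialReference", reference_name="myUserAssignedIdentityCredential"
        ),
    )
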
class HdInsightPigActivity(ExecutionActivity):
@@ -17530,12 +18083,15 @@ class HiveSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -17551,8 +18107,9 @@ class HiveSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -17755,6 +18312,9 @@ class HttpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -17782,6 +18342,7 @@ class HttpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -17865,6 +18426,9 @@ class HttpSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param http_request_timeout: Specifies the timeout for an HTTP client to get an HTTP response
from the HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string
(or Expression with resultType string), pattern:
@@ -17882,6 +18446,7 @@ class HttpSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -18053,12 +18618,15 @@ class HubspotSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -18074,8 +18642,9 @@ class HubspotSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18335,12 +18904,15 @@ class ImpalaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -18356,8 +18928,9 @@ class ImpalaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18469,6 +19042,9 @@ class InformixSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -18486,6 +19062,7 @@ class InformixSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -18517,12 +19094,15 @@ class InformixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -18537,8 +19117,9 @@ class InformixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18825,6 +19406,9 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
:param time_to_live: Time to live (in minutes) setting of the cluster that will execute the
data flow job.
:type time_to_live: int
+ :param cleanup: If set to false, the cluster will not be recycled; it will be reused by the
+ next data flow activity run until its TTL (time to live) is reached. Default is true.
+ :type cleanup: bool
"""
_validation = {
@@ -18836,6 +19420,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
'compute_type': {'key': 'computeType', 'type': 'str'},
'core_count': {'key': 'coreCount', 'type': 'int'},
'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+ 'cleanup': {'key': 'cleanup', 'type': 'bool'},
}
def __init__(
@@ -18847,6 +19432,7 @@ def __init__(
self.compute_type = kwargs.get('compute_type', None)
self.core_count = kwargs.get('core_count', None)
self.time_to_live = kwargs.get('time_to_live', None)
+ self.cleanup = kwargs.get('cleanup', None)
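
`cleanup` defaults to true, meaning the data flow cluster is recycled after each run; setting it to false keeps the cluster warm until the TTL elapses. A small sketch under the same import-path assumption as above:

    from data_factory_management_client.models import IntegrationRuntimeDataFlowProperties

    data_flow_props = IntegrationRuntimeDataFlowProperties(
        compute_type="General",
        core_count=8,
        time_to_live=15,  # minutes the warm cluster is kept
        cleanup=False,    # keep the cluster for subsequent runs until the TTL expires
    )
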
class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model):
@@ -19047,6 +19633,93 @@ def __init__(
self.received_bytes = None
+class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model):
+ """Azure-SSIS integration runtime outbound network dependency endpoints for one category.
+
+ :param category: The category of outbound network dependency.
+ :type category: str
+ :param endpoints: The endpoints for outbound network dependency.
+ :type endpoints:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint]
+ """
+
+ _attribute_map = {
+ 'category': {'key': 'category', 'type': 'str'},
+ 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs)
+ self.category = kwargs.get('category', None)
+ self.endpoints = kwargs.get('endpoints', None)
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model):
+ """The endpoint for Azure-SSIS integration runtime outbound network dependency.
+
+ :param domain_name: The domain name of the endpoint.
+ :type domain_name: str
+ :param endpoint_details: The details of the endpoint.
+ :type endpoint_details:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]
+ """
+
+ _attribute_map = {
+ 'domain_name': {'key': 'domainName', 'type': 'str'},
+ 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs)
+ self.domain_name = kwargs.get('domain_name', None)
+ self.endpoint_details = kwargs.get('endpoint_details', None)
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model):
+ """The details of Azure-SSIS integration runtime outbound network dependency endpoint.
+
+ :param port: The port of the endpoint.
+ :type port: int
+ """
+
+ _attribute_map = {
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs)
+ self.port = kwargs.get('port', None)
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model):
+ """Azure-SSIS integration runtime outbound network dependency endpoints.
+
+ :param value: The list of outbound network dependency endpoints.
+ :type value:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
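
The four models above form a nested response: categories hold endpoints, which hold port details. A hedged helper that flattens such a response into `(category, domain, port)` tuples:

    def iter_outbound_endpoints(response):
        """Flatten an IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse."""
        for category in response.value or []:
            for endpoint in category.endpoints or []:
                for detail in endpoint.endpoint_details or []:
                    yield category.category, endpoint.domain_name, detail.port
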
class IntegrationRuntimeReference(msrest.serialization.Model):
"""Integration runtime reference type.
@@ -19225,6 +19898,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
list[~data_factory_management_client.models.CustomSetupBase]
:param package_stores: Package stores for the SSIS Integration Runtime.
:type package_stores: list[~data_factory_management_client.models.PackageStore]
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_attribute_map = {
@@ -19236,6 +19911,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
'edition': {'key': 'edition', 'type': 'str'},
'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -19251,6 +19927,7 @@ def __init__(
self.edition = kwargs.get('edition', None)
self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None)
self.package_stores = kwargs.get('package_stores', None)
+ self.credential = kwargs.get('credential', None)
class IntegrationRuntimeStatus(msrest.serialization.Model):
@@ -19379,6 +20056,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
:param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will
use.
:type public_i_ps: list[str]
+ :param subnet_id: The ID of the subnet to which this Azure-SSIS integration runtime will be
+ joined.
+ :type subnet_id: str
"""
_attribute_map = {
@@ -19386,6 +20066,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
'v_net_id': {'key': 'vNetId', 'type': 'str'},
'subnet': {'key': 'subnet', 'type': 'str'},
'public_i_ps': {'key': 'publicIPs', 'type': '[str]'},
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
}
def __init__(
@@ -19397,6 +20078,7 @@ def __init__(
self.v_net_id = kwargs.get('v_net_id', None)
self.subnet = kwargs.get('subnet', None)
self.public_i_ps = kwargs.get('public_i_ps', None)
+ self.subnet_id = kwargs.get('subnet_id', None)
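
`subnetId` gives an ARM-resource-ID alternative to the existing `vNetId` plus `subnet` name pair. A sketch with a dummy subscription ID:

    from data_factory_management_client.models import IntegrationRuntimeVNetProperties

    vnet_props = IntegrationRuntimeVNetProperties(
        subnet_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/my-rg/providers/Microsoft.Network"
            "/virtualNetworks/my-vnet/subnets/ssis-subnet"
        ),
    )
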
class JiraLinkedService(LinkedService):
@@ -19560,12 +20242,15 @@ class JiraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -19581,8 +20266,9 @@ class JiraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -19680,9 +20366,8 @@ class JsonFormat(DatasetStorageFormat):
:param deserializer: Deserializer. Type: string (or Expression with resultType string).
:type deserializer: object
:param file_pattern: File pattern of JSON; that is, the way of separating a
- collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern
+ collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
:param nesting_separator: The character used to separate nesting levels. Default value is '.'
(dot). Type: string (or Expression with resultType string).
:type nesting_separator: object
@@ -19712,7 +20397,7 @@ class JsonFormat(DatasetStorageFormat):
'type': {'key': 'type', 'type': 'str'},
'serializer': {'key': 'serializer', 'type': 'object'},
'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'encoding_name': {'key': 'encodingName', 'type': 'object'},
'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
@@ -19790,6 +20475,9 @@ class JsonSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Json format settings.
@@ -19808,6 +20496,7 @@ class JsonSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
}
@@ -19841,13 +20530,16 @@ class JsonSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Json format settings.
:type format_settings: ~data_factory_management_client.models.JsonReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -19860,9 +20552,10 @@ class JsonSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -19887,9 +20580,8 @@ class JsonWriteSettings(FormatWriteSettings):
:param type: Required. The write setting type. Constant filled by server.
:type type: str
:param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON
- objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern
+ objects will be treated. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
"""
_validation = {
@@ -19899,7 +20591,7 @@ class JsonWriteSettings(FormatWriteSettings):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
}
def __init__(
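
Relaxing `file_pattern` from the `JsonWriteFilePattern` enum to an untyped object lets it carry either a literal string or a pipeline expression. A sketch of both forms (the pipeline parameter name is hypothetical):

    from data_factory_management_client.models import JsonWriteSettings

    literal = JsonWriteSettings(file_pattern="arrayOfObjects")
    dynamic = JsonWriteSettings(
        file_pattern={
            "value": "@pipeline().parameters.jsonFilePattern",  # hypothetical parameter
            "type": "Expression",
        }
    )
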
@@ -20538,12 +21230,15 @@ class MagentoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -20559,8 +21254,9 @@ class MagentoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -20573,6 +21269,45 @@ def __init__(
self.query = kwargs.get('query', None)
+class ManagedIdentityCredential(Credential):
+ """Managed identity credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param resource_id: The resource ID of the user-assigned managed identity.
+ :type resource_id: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedIdentityCredential, self).__init__(**kwargs)
+ self.type = 'ManagedIdentity' # type: str
+ self.resource_id = kwargs.get('resource_id', None)
+
+
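
A minimal sketch of constructing the new credential model; the resource ID below is a placeholder:

    from data_factory_management_client.models import ManagedIdentityCredential

    credential = ManagedIdentityCredential(
        description="User-assigned identity for linked services",
        resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/my-rg/providers/Microsoft.ManagedIdentity"
            "/userAssignedIdentities/my-identity"
        ),
    )
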
class ManagedIntegrationRuntime(IntegrationRuntime):
"""Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
@@ -21219,12 +21954,15 @@ class MariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -21240,8 +21978,9 @@ class MariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -21466,12 +22205,15 @@ class MarketoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -21487,8 +22229,9 @@ class MarketoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -21501,6 +22244,29 @@ def __init__(
self.query = kwargs.get('query', None)
+class MetadataItem(msrest.serialization.Model):
+ """Specify the name and value of custom metadata item.
+
+ :param name: Metadata item key name. Type: string (or Expression with resultType string).
+ :type name: object
+ :param value: Metadata item value. Type: string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'object'},
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MetadataItem, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
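
`MetadataItem` pairs are untyped name/value objects, so literals and expressions both fit. A sketch of metadata that a sink supporting custom metadata could carry (the consuming sink sits outside this hunk):

    from data_factory_management_client.models import MetadataItem

    custom_metadata = [
        MetadataItem(name="department", value="finance"),
        MetadataItem(name="ingestedAt", value="@utcnow()"),  # expression-valued
    ]
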
class MicrosoftAccessLinkedService(LinkedService):
"""Microsoft Access linked service.
@@ -21600,6 +22366,9 @@ class MicrosoftAccessSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -21617,6 +22386,7 @@ class MicrosoftAccessSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -21648,11 +22418,14 @@ class MicrosoftAccessSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -21665,8 +22438,9 @@ class MicrosoftAccessSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -21852,6 +22626,65 @@ def __init__(
self.database = kwargs['database']
+class MongoDbAtlasSink(CopySink):
+ """A copy activity MongoDB Atlas sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key is overwritten (upsert)
+ rather than raising an exception (insert). The default value is "insert". Type: string (or
+ Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbAtlasSink, self).__init__(**kwargs)
+ self.type = 'MongoDbAtlasSink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
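
`write_behavior` is the sink's only type-specific knob; the rest is inherited from `CopySink`. A sketch of an upserting Atlas sink:

    from data_factory_management_client.models import MongoDbAtlasSink

    atlas_sink = MongoDbAtlasSink(
        write_behavior="upsert",       # overwrite documents with the same key
        write_batch_size=1000,
        write_batch_timeout="00:05:00",
        disable_metrics_collection=False,
    )
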
class MongoDbAtlasSource(CopySource):
"""A copy activity source for a MongoDB Atlas database.
@@ -21871,6 +22704,9 @@ class MongoDbAtlasSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -21886,8 +22722,8 @@ class MongoDbAtlasSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -21900,11 +22736,12 @@ class MongoDbAtlasSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -22136,12 +22973,15 @@ class MongoDbSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -22154,8 +22994,9 @@ class MongoDbSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -22281,6 +23122,65 @@ def __init__(
self.database = kwargs['database']
+class MongoDbV2Sink(CopySink):
+ """A copy activity MongoDB sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key is overwritten (upsert)
+ rather than raising an exception (insert). The default value is "insert". Type: string (or
+ Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(**kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
class MongoDbV2Source(CopySource):
"""A copy activity source for a MongoDB database.
@@ -22300,6 +23200,9 @@ class MongoDbV2Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -22315,8 +23218,8 @@ class MongoDbV2Source(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -22329,11 +23232,12 @@ class MongoDbV2Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -22424,12 +23328,15 @@ class MySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -22444,8 +23351,9 @@ class MySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -22623,12 +23531,15 @@ class NetezzaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -22649,8 +23560,9 @@ class NetezzaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
@@ -22932,6 +23844,9 @@ class ODataSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: OData query. For example, "$top=1". Type: string (or Expression with resultType
string).
:type query: object
@@ -22941,8 +23856,8 @@ class ODataSource(CopySource):
((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type http_request_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -22955,9 +23870,10 @@ class ODataSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -23069,6 +23985,9 @@ class OdbcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -23086,6 +24005,7 @@ class OdbcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -23117,12 +24037,15 @@ class OdbcSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -23137,8 +24060,9 @@ class OdbcSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -23363,6 +24287,9 @@ class Office365Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param allowed_groups: The groups containing all the users. Type: array of strings (or
Expression with resultType array of strings).
:type allowed_groups: object
@@ -23394,6 +24321,7 @@ class Office365Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
@@ -23783,6 +24711,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -23823,6 +24754,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -24103,12 +25035,15 @@ class OracleServiceCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -24124,8 +25059,9 @@ class OracleServiceCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -24163,6 +25099,9 @@ class OracleSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -24180,6 +25119,7 @@ class OracleSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -24211,6 +25151,9 @@ class OracleSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType
string).
:type oracle_reader_query: object
@@ -24223,8 +25166,8 @@ class OracleSource(CopySource):
:param partition_settings: The settings that will be leveraged for Oracle source partitioning.
:type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -24237,11 +25180,12 @@ class OracleSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -24355,8 +25299,9 @@ class OrcDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the ORC data storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo".
- :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec
+ :param orc_compression_codec: The ORC compression codec to use. Type: string (or Expression with
+ resultType string).
+ :type orc_compression_codec: object
"""
_validation = {
@@ -24375,7 +25320,7 @@ class OrcDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'},
+ 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'},
}
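# --- Hedged example (editor's sketch) ---
# With 'typeProperties.orcCompressionCodec' widened from a fixed enum string
# to 'object' (and likewise 'compressionCodec' on ParquetDataset below), the
# codec can now be parameterized per run. Dict form only; names are placeholders.
orc_dataset = {
    "type": "Orc",
    "typeProperties": {
        "location": {"type": "AzureBlobStorageLocation", "container": "data"},
        "orcCompressionCodec": {"value": "@dataset().codec", "type": "Expression"},
    },
}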
def __init__(
@@ -24448,6 +25393,9 @@ class OrcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: ORC format settings.
@@ -24466,6 +25414,7 @@ class OrcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'},
}
@@ -24499,11 +25448,14 @@ class OrcSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -24516,8 +25468,9 @@ class OrcSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -24659,9 +25612,9 @@ class ParquetDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the parquet storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
+ :param compression_codec: The data compression codec to use. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
"""
_validation = {
@@ -24680,7 +25633,7 @@ class ParquetDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
}
def __init__(
@@ -24753,6 +25706,9 @@ class ParquetSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Parquet format settings.
@@ -24771,6 +25727,7 @@ class ParquetSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'},
}
@@ -24804,11 +25761,14 @@ class ParquetSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -24821,8 +25781,9 @@ class ParquetSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -25029,12 +25990,15 @@ class PaypalSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -25050,8 +26014,9 @@ class PaypalSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -25258,12 +26223,15 @@ class PhoenixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -25279,8 +26247,9 @@ class PhoenixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -25598,18 +26567,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model):
:vartype id: str
:ivar invoked_by_type: The type of the entity that started the run.
:vartype invoked_by_type: str
+ :ivar pipeline_name: The name of the pipeline that triggered the run, if any.
+ :vartype pipeline_name: str
+ :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any.
+ :vartype pipeline_run_id: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'invoked_by_type': {'readonly': True},
+ 'pipeline_name': {'readonly': True},
+ 'pipeline_run_id': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
+ 'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
+ 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
}
def __init__(
@@ -25620,6 +26597,8 @@ def __init__(
self.name = None
self.id = None
self.invoked_by_type = None
+ self.pipeline_name = None
+ self.pipeline_run_id = None
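# --- Hedged example (editor's sketch) ---
# The two new read-only fields identify the parent pipeline run (if any) that
# started this one, e.g. via an ExecutePipeline activity. They are
# server-populated, so client code only reads them; this dict stands in for a
# deserialized response and all values are illustrative.
invoked_by = {
    "name": "Orchestrator",
    "invokedByType": "PipelineActivity",
    "pipelineName": "Orchestrator",
    "pipelineRunId": "d8a4f2c0-0000-0000-0000-000000000000",
}
if invoked_by.get("pipelineRunId"):
    print(f"started by parent run {invoked_by['pipelineRunId']}")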
class PipelineRunsQueryResponse(msrest.serialization.Model):
@@ -25768,12 +26747,15 @@ class PostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -25788,8 +26770,9 @@ class PostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -26072,12 +27055,15 @@ class PrestoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -26093,8 +27079,9 @@ class PrestoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -26544,12 +27531,15 @@ class QuickBooksSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -26565,8 +27555,9 @@ class QuickBooksSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -26739,11 +27730,14 @@ class RelationalSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -26756,8 +27750,9 @@ class RelationalSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -27085,12 +28080,15 @@ class ResponsysSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -27106,8 +28104,9 @@ class ResponsysSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -27254,6 +28253,8 @@ class RestServiceLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -27281,6 +28282,7 @@ class RestServiceLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -27301,6 +28303,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.aad_resource_id = kwargs.get('aad_resource_id', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
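# --- Hedged example (editor's sketch) ---
# The new 'credential' property nests under typeProperties as a
# CredentialReference, i.e. a pointer to a factory-level credential instead of
# inline secrets. Reference name and URL are placeholders.
rest_linked_service = {
    "type": "RestService",
    "typeProperties": {
        "url": "https://example.com/api",
        "authenticationType": "ManagedServiceIdentity",
        "credential": {
            "type": "CredentialReference",
            "referenceName": "userAssignedIdentityCredential",
        },
    },
}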
class RestSink(CopySink):
@@ -27328,6 +28331,9 @@ class RestSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -27358,6 +28364,7 @@ class RestSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
@@ -27397,6 +28404,9 @@ class RestSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -27417,8 +28427,8 @@ class RestSource(CopySource):
:param request_interval: The time to wait before sending the next page request.
:type request_interval: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -27431,13 +28441,14 @@ class RestSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'pagination_rules': {'key': 'paginationRules', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
'request_interval': {'key': 'requestInterval', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
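# --- Hedged example (editor's sketch) ---
# A RestSource payload exercising the keys mapped above. The 'AbsoluteUrl'
# pagination rule follows the documented REST-connector pattern; the endpoint,
# header, and interval values are placeholders.
rest_source = {
    "type": "RestSource",
    "requestMethod": "GET",
    "additionalHeaders": "Accept: application/json",
    "paginationRules": {"AbsoluteUrl": "$.nextLink"},
    "httpRequestTimeout": "00:01:40",
    "requestInterval": 100,  # milliseconds between page requests
    "disableMetricsCollection": False,
}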
def __init__(
@@ -27832,12 +28843,15 @@ class SalesforceMarketingCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -27853,8 +28867,9 @@ class SalesforceMarketingCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -28087,6 +29102,9 @@ class SalesforceServiceCloudSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -28114,6 +29132,7 @@ class SalesforceServiceCloudSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -28149,14 +29168,17 @@ class SalesforceServiceCloudSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
include: "Query", "QueryAll".
:type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -28169,9 +29191,10 @@ class SalesforceServiceCloudSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -28210,6 +29233,9 @@ class SalesforceSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -28237,6 +29263,7 @@ class SalesforceSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -28272,12 +29299,15 @@ class SalesforceSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
@@ -28295,8 +29325,9 @@ class SalesforceSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
}
@@ -28458,12 +29489,15 @@ class SapBwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: MDX query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -28478,8 +29512,9 @@ class SapBwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -28640,6 +29675,9 @@ class SapCloudForCustomerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible
values include: "Insert", "Update".
:type write_behavior: str or
@@ -28663,6 +29701,7 @@ class SapCloudForCustomerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -28696,12 +29735,15 @@ class SapCloudForCustomerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or
Expression with resultType string).
:type query: object
@@ -28722,8 +29764,9 @@ class SapCloudForCustomerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -28880,12 +29923,15 @@ class SapEccSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with
resultType string).
:type query: object
@@ -28906,8 +29952,9 @@ class SapEccSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -29033,12 +30080,15 @@ class SapHanaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP HANA SQL query. Type: string (or Expression with resultType string).
:type query: object
:param packet_size: The packet size of data read from SAP HANA. Type: integer (or Expression
@@ -29062,8 +30112,9 @@ class SapHanaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'packet_size': {'key': 'packetSize', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
@@ -29263,12 +30314,15 @@ class SapOpenHubSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param exclude_last_request: Whether to exclude the records of the last request. The default
value is true. Type: boolean (or Expression with resultType boolean).
:type exclude_last_request: object
@@ -29295,8 +30349,9 @@ class SapOpenHubSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
'base_request_id': {'key': 'baseRequestId', 'type': 'object'},
'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
@@ -29625,12 +30680,15 @@ class SapTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param row_count: The number of rows to be retrieved. Type: integer (or Expression with
resultType integer).
:type row_count: object
@@ -29672,8 +30730,9 @@ class SapTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'row_count': {'key': 'rowCount', 'type': 'object'},
'row_skips': {'key': 'rowSkips', 'type': 'object'},
'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
@@ -29806,9 +30865,8 @@ class ScriptAction(msrest.serialization.Model):
:type name: str
:param uri: Required. The URI for the script action.
:type uri: str
- :param roles: Required. The node types on which the script action should be executed. Possible
- values include: "Headnode", "Workernode", "Zookeeper".
- :type roles: str or ~data_factory_management_client.models.HdiNodeTypes
+ :param roles: Required. The node types on which the script action should be executed.
+ :type roles: str
:param parameters: The parameters for the script action.
:type parameters: str
"""
@@ -30364,12 +31422,15 @@ class ServiceNowSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -30385,8 +31446,9 @@ class ServiceNowSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -30399,6 +31461,54 @@ def __init__(
self.query = kwargs.get('query', None)
+class ServicePrincipalCredential(Credential):
+ """Service principal credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param service_principal_id: The app ID of the service principal used to authenticate.
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to authenticate.
+ :type service_principal_key:
+ ~data_factory_management_client.models.AzureKeyVaultSecretReference
+ :param tenant: The ID of the tenant to which the service principal belongs.
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredential, self).__init__(**kwargs)
+ self.type = 'ServicePrincipal' # type: str
+ self.service_principal_id = kwargs.get('service_principal_id', None)
+ self.service_principal_key = kwargs.get('service_principal_key', None)
+ self.tenant = kwargs.get('tenant', None)
+
+
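
A minimal construction sketch for the new credential model (import path assumed from
this file layout; the IDs are placeholders, and serialize() comes from the msrest
base model):

    from azext_datafactory.vendored_sdks.datafactory.models import ServicePrincipalCredential

    cred = ServicePrincipalCredential(
        service_principal_id="<app-id>",
        tenant="<tenant-id>",
    )
    # The dotted attribute_map keys nest both values under typeProperties:
    print(cred.serialize())
    # {'type': 'ServicePrincipal',
    #  'typeProperties': {'servicePrincipalId': '<app-id>', 'tenant': '<tenant-id>'}}

In practice service_principal_key would carry an AzureKeyVaultSecretReference rather
than an inline secret, per the parameter type above.
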
class SetVariableActivity(Activity):
"""Set value for a Variable.
@@ -30499,6 +31609,9 @@ class SftpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -30536,6 +31649,7 @@ class SftpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -30678,6 +31792,9 @@ class SftpWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default
@@ -30697,6 +31814,7 @@ class SftpWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'operation_timeout': {'key': 'operationTimeout', 'type': 'object'},
'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'},
@@ -30864,6 +31982,9 @@ class SharePointOnlineListSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: The OData query to filter the data in SharePoint Online list. For example,
"$top=1". Type: string (or Expression with resultType string).
:type query: object
@@ -30883,6 +32004,7 @@ class SharePointOnlineListSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -31047,12 +32169,15 @@ class ShopifySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -31068,8 +32193,9 @@ class ShopifySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -31339,6 +32465,9 @@ class SnowflakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -31358,6 +32487,7 @@ class SnowflakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'},
}
@@ -31391,6 +32521,9 @@ class SnowflakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Snowflake Sql query. Type: string (or Expression with resultType string).
:type query: object
:param export_settings: Snowflake export settings.
@@ -31407,6 +32540,7 @@ class SnowflakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'},
}
@@ -31624,12 +32758,15 @@ class SparkSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -31645,8 +32782,9 @@ class SparkSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -31722,6 +32860,9 @@ class SqlDwSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -31739,6 +32880,14 @@ class SqlDwSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL DW. Type:
+ SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL DW upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings
"""
_validation = {
@@ -31753,12 +32902,16 @@ class SqlDwSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'},
'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'},
}
def __init__(
@@ -31773,6 +32926,9 @@ def __init__(
self.allow_copy_command = kwargs.get('allow_copy_command', None)
self.copy_command_settings = kwargs.get('copy_command_settings', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlDwSource(TabularSource):
@@ -31794,12 +32950,15 @@ class SqlDwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with
resultType string).
:type sql_reader_query: object
@@ -31828,8 +32987,9 @@ class SqlDwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'},
@@ -31850,6 +33010,31 @@ def __init__(
self.partition_settings = kwargs.get('partition_settings', None)
+class SqlDwUpsertSettings(msrest.serialization.Model):
+ """Sql DW upsert option settings.
+
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlDwUpsertSettings, self).__init__(**kwargs)
+ self.interim_schema_name = kwargs.get('interim_schema_name', None)
+ self.keys = kwargs.get('keys', None)
+
+
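
A hedged sketch of the new upsert surface on a SQL DW sink ("Upsert" stands in for a
SqlDWWriteBehaviorEnum literal, which is an assumption based on the docstring):

    from azext_datafactory.vendored_sdks.datafactory.models import (
        SqlDwSink,
        SqlDwUpsertSettings,
    )

    sink = SqlDwSink(
        write_behavior="Upsert",            # assumed enum literal
        sql_writer_use_table_lock=False,    # no table lock during the bulk copy
        upsert_settings=SqlDwUpsertSettings(
            interim_schema_name="staging",  # schema hosting the interim table
            keys=["CustomerId"],            # key columns for unique row identification
        ),
    )
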
class SqlMiSink(CopySink):
"""A copy activity Azure SQL Managed Instance sink.
@@ -31875,6 +33060,9 @@ class SqlMiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -31893,6 +33081,14 @@ class SqlMiSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL MI. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -31907,12 +33103,16 @@ class SqlMiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -31927,6 +33127,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlMiSource(TabularSource):
@@ -31948,12 +33151,15 @@ class SqlMiSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed
@@ -31983,8 +33189,9 @@ class SqlMiSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -32134,6 +33341,9 @@ class SqlServerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -32152,6 +33362,14 @@ class SqlServerSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL Server. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -32166,12 +33384,16 @@ class SqlServerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -32186,6 +33408,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlServerSource(TabularSource):
@@ -32207,12 +33432,15 @@ class SqlServerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -32242,8 +33470,9 @@ class SqlServerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -32420,6 +33649,9 @@ class SqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -32438,6 +33670,14 @@ class SqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL. Type: SqlWriteBehaviorEnum
+ (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -32452,12 +33692,16 @@ class SqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -32472,6 +33716,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlSource(TabularSource):
@@ -32493,12 +33740,15 @@ class SqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -32530,8 +33780,9 @@ class SqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -32554,6 +33805,36 @@ def __init__(
self.partition_settings = kwargs.get('partition_settings', None)
+class SqlUpsertSettings(msrest.serialization.Model):
+ """Sql upsert option settings.
+
+ :param use_temp_db: Specifies whether to use temp DB for the upsert interim table. Type: boolean
+ (or Expression with resultType boolean).
+ :type use_temp_db: object
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlUpsertSettings, self).__init__(**kwargs)
+ self.use_temp_db = kwargs.get('use_temp_db', None)
+ self.interim_schema_name = kwargs.get('interim_schema_name', None)
+ self.keys = kwargs.get('keys', None)
+
+
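
The SQL-family sinks (SqlSink, SqlServerSink, SqlMiSink) all reference this settings
model; a sketch under the same assumptions as the DW example:

    from azext_datafactory.vendored_sdks.datafactory.models import (
        SqlServerSink,
        SqlUpsertSettings,
    )

    sink = SqlServerSink(
        write_behavior="Upsert",    # assumed SqlWriteBehaviorEnum literal
        upsert_settings=SqlUpsertSettings(
            use_temp_db=True,       # stage the interim table in temp DB
            keys=["Id"],
        ),
    )
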
class SquareLinkedService(LinkedService):
"""Square Service linked service.
@@ -32716,12 +33997,15 @@ class SquareSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -32737,8 +34021,9 @@ class SquareSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -33679,12 +34964,15 @@ class SybaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -33699,8 +34987,9 @@ class SybaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -34030,12 +35319,15 @@ class TeradataSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Teradata query. Type: string (or Expression with resultType string).
:type query: object
:param partition_option: The partition mechanism that will be used for teradata read in
@@ -34056,8 +35348,9 @@ class TeradataSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
@@ -35107,12 +36400,15 @@ class VerticaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -35128,8 +36424,9 @@ class VerticaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -35348,10 +36645,7 @@ def __init__(
class WebActivityAuthentication(msrest.serialization.Model):
"""Web activity authentication properties.
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI/ServicePrincipal).
+ :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal).
:type type: str
:param pfx: Base64-encoded contents of a PFX file or Certificate when used for
ServicePrincipal.
@@ -35368,12 +36662,10 @@ class WebActivityAuthentication(msrest.serialization.Model):
:param user_tenant: TenantId for which Azure Auth token will be requested when using
ServicePrincipal Authentication. Type: string (or Expression with resultType string).
:type user_tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
- _validation = {
- 'type': {'required': True},
- }
-
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'SecretBase'},
@@ -35381,6 +36673,7 @@ class WebActivityAuthentication(msrest.serialization.Model):
'password': {'key': 'password', 'type': 'SecretBase'},
'resource': {'key': 'resource', 'type': 'object'},
'user_tenant': {'key': 'userTenant', 'type': 'object'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -35388,12 +36681,13 @@ def __init__(
**kwargs
):
super(WebActivityAuthentication, self).__init__(**kwargs)
- self.type = kwargs['type']
+ self.type = kwargs.get('type', None)
self.pfx = kwargs.get('pfx', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.resource = kwargs.get('resource', None)
self.user_tenant = kwargs.get('user_tenant', None)
+ self.credential = kwargs.get('credential', None)
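
With type no longer required, web activity authentication can instead be driven by
the new credential reference; a sketch (the type discriminator on CredentialReference
is an assumption, mirroring the other reference models in this SDK):

    from azext_datafactory.vendored_sdks.datafactory.models import (
        CredentialReference,
        WebActivityAuthentication,
    )

    auth = WebActivityAuthentication(
        credential=CredentialReference(
            reference_name="MyServicePrincipalCredential",
            type="CredentialReference",  # assumed constant discriminator
        ),
    )
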
class WebLinkedServiceTypeProperties(msrest.serialization.Model):
@@ -35699,9 +36993,12 @@ class WebSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -35714,7 +37011,8 @@ class WebSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -35950,12 +37248,15 @@ class XeroSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -35971,8 +37272,9 @@ class XeroSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -36133,13 +37435,16 @@ class XmlSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Xml store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Xml format settings.
:type format_settings: ~data_factory_management_client.models.XmlReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -36152,9 +37457,10 @@ class XmlSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -36355,12 +37661,15 @@ class ZohoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -36376,8 +37685,9 @@ class ZohoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
index f6ebc8328ae..f25551d208c 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
@@ -711,6 +711,9 @@ class CopySource(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -723,6 +726,7 @@ class CopySource(msrest.serialization.Model):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -736,6 +740,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(CopySource, self).__init__(**kwargs)
@@ -744,6 +749,7 @@ def __init__(
self.source_retry_count = source_retry_count
self.source_retry_wait = source_retry_wait
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
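
Because the new parameter is typed as object, any CopySource subtype accepts either a
literal boolean or an ADF expression; a sketch using the py3 keyword-only models (the
pipeline parameter name is illustrative):

    from azext_datafactory.vendored_sdks.datafactory.models import SqlServerSource

    # Literal value:
    source = SqlServerSource(disable_metrics_collection=True)

    # Or an expression resolved at run time:
    source = SqlServerSource(
        disable_metrics_collection={
            "type": "Expression",
            "value": "@pipeline().parameters.disableMetrics",
        },
    )
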
class TabularSource(CopySource):
@@ -768,12 +774,15 @@ class TabularSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -786,8 +795,9 @@ class TabularSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
_subtype_map = {
@@ -801,11 +811,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'TabularSource' # type: str
self.query_timeout = query_timeout
self.additional_columns = additional_columns
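
Loosening additional_columns from list[AdditionalColumns] to object lets callers pass
either the familiar name/value list or an expression; a sketch (the dict shape follows
the AdditionalColumns model):

    from azext_datafactory.vendored_sdks.datafactory.models import SqlSource

    source = SqlSource(
        sql_reader_query="SELECT * FROM dbo.Orders",
        additional_columns=[
            {"name": "sourceSystem", "value": "erp"},
        ],
    )
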
@@ -830,12 +841,15 @@ class AmazonMwsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -851,8 +865,9 @@ class AmazonMwsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -863,12 +878,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AmazonMWSSource' # type: str
self.query = query
@@ -977,12 +993,15 @@ class AmazonRedshiftSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when
@@ -1001,8 +1020,9 @@ class AmazonRedshiftSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'},
}
@@ -1014,13 +1034,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None,
**kwargs
):
- super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AmazonRedshiftSource' # type: str
self.query = query
self.redshift_unload_settings = redshift_unload_settings
@@ -1307,6 +1328,9 @@ class StoreReadSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1317,6 +1341,7 @@ class StoreReadSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -1328,12 +1353,14 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(StoreReadSettings, self).__init__(**kwargs)
self.additional_properties = additional_properties
self.type = 'StoreReadSettings' # type: str
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
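
Because the flag is declared once on the `StoreReadSettings` base and every subclass forwards it through `super().__init__`, the same keyword now works uniformly across all read-settings types. A short sketch under the same import-path assumption::

    from data_factory_management_client.models import (
        AmazonS3ReadSettings,
        AzureBlobFsReadSettings,
    )

    # Generic code can set the flag without caring about the concrete subtype:
    # it always lands on the shared base-class attribute.
    for settings_cls in (AmazonS3ReadSettings, AzureBlobFsReadSettings):
        settings = settings_cls(recursive=True, disable_metrics_collection=False)
        assert settings.disable_metrics_collection is False
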
class AmazonS3CompatibleReadSettings(StoreReadSettings):
@@ -1349,6 +1376,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1389,6 +1419,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1406,6 +1437,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -1418,7 +1450,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AmazonS3CompatibleReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -1692,6 +1724,9 @@ class AmazonS3ReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1732,6 +1767,7 @@ class AmazonS3ReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1749,6 +1785,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -1761,7 +1798,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AmazonS3ReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -1887,10 +1924,9 @@ class AvroDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the avro storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz",
- "bzip2".
- :type avro_compression_codec: str or
- ~data_factory_management_client.models.AvroCompressionCodec
+ :param avro_compression_codec: The data compression codec (serialized as
+ avroCompressionCodec). Type: string (or Expression with resultType string).
+ :type avro_compression_codec: object
:param avro_compression_level:
:type avro_compression_level: int
"""
@@ -1912,7 +1948,7 @@ class AvroDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
+ 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'},
'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
}
@@ -1928,7 +1964,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None,
+ avro_compression_codec: Optional[object] = None,
avro_compression_level: Optional[int] = None,
**kwargs
):
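
Loosening `avro_compression_codec` from the `AvroCompressionCodec` enum to a bare object is what lets an expression (not just a literal codec name) flow through. A hedged sketch; the dataset parameter below is hypothetical, and `LinkedServiceReference` is assumed to take the `type`/`reference_name` pair its serialization keys suggest::

    from data_factory_management_client.models import (
        AvroDataset,
        LinkedServiceReference,
    )

    ls_ref = LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MyBlobStorage",
    )

    # Previously only "none"/"deflate"/"snappy"/"xz"/"bzip2" type-checked;
    # an expression object is now accepted as well.
    dataset = AvroDataset(
        linked_service_name=ls_ref,
        avro_compression_codec={"type": "Expression", "value": "@dataset().codec"},
        avro_compression_level=5,  # plain int, unchanged by this diff
    )
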
@@ -2031,7 +2067,7 @@ class CopySink(msrest.serialization.Model):
"""A copy activity sink.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
+ sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
All required parameters must be populated in order to send to Azure.
@@ -2055,6 +2091,9 @@ class CopySink(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -2069,10 +2108,11 @@ class CopySink(msrest.serialization.Model):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
- 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
+ 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
}
def __init__(
@@ -2084,6 +2124,7 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(CopySink, self).__init__(**kwargs)
@@ -2094,6 +2135,7 @@ def __init__(
self.sink_retry_count = sink_retry_count
self.sink_retry_wait = sink_retry_wait
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
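
The `_subtype_map` additions are what make the two new sink discriminators round-trip: deserializing through the `CopySink` base resolves the concrete class from the wire `type` value. A sketch, assuming `MongoDbAtlasSink` is defined later in this module as the map implies::

    from data_factory_management_client.models import CopySink

    payload = {
        "type": "MongoDbAtlasSink",        # new entry in _subtype_map
        "disableMetricsCollection": False,
    }
    sink = CopySink.deserialize(payload)   # msrest picks the subclass via the map
    assert type(sink).__name__ == "MongoDbAtlasSink"
    assert sink.disable_metrics_collection is False
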
class AvroSink(CopySink):
@@ -2121,6 +2163,9 @@ class AvroSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Avro format settings.
@@ -2139,6 +2184,7 @@ class AvroSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
}
@@ -2152,11 +2198,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["AvroWriteSettings"] = None,
**kwargs
):
- super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AvroSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -2181,11 +2228,14 @@ class AvroSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -2198,8 +2248,9 @@ class AvroSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -2209,11 +2260,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AvroSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
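
The companion `additional_columns` relaxation means both shapes the service accepts now pass straight through the model layer. Sketch (the pipeline parameter is hypothetical)::

    from data_factory_management_client.models import AvroSource

    # A plain array of name/value objects...
    source = AvroSource(additional_columns=[
        {"name": "ingest_date",
         "value": {"type": "Expression", "value": "@utcnow()"}},
    ])

    # ...or one expression that yields the whole array at run time.
    source = AvroSource(additional_columns={
        "type": "Expression",
        "value": "@pipeline().parameters.extraColumns",
    })
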
@@ -2411,6 +2463,8 @@ class AzureBatchLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2434,6 +2488,7 @@ class AzureBatchLinkedService(LinkedService):
'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'},
'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2450,6 +2505,7 @@ def __init__(
annotations: Optional[List[object]] = None,
access_key: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -2460,6 +2516,7 @@ def __init__(
self.pool_name = pool_name
self.linked_service_name = linked_service_name
self.encrypted_credential = encrypted_credential
+ self.credential = credential
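
The recurring `credential` hunks attach a `CredentialReference` (the user-assigned managed identity plumbing) to each linked service that supports it, serialized under `typeProperties.credential`. A hedged sketch; the constructor arguments are inferred from the serialization keys, and the credential and account names are made up::

    from data_factory_management_client.models import (
        AzureBatchLinkedService,
        CredentialReference,
        LinkedServiceReference,
    )

    cred = CredentialReference(
        type="CredentialReference",
        reference_name="myUamiCredential",
    )
    linked_service = AzureBatchLinkedService(
        account_name="batchacct",
        batch_uri="https://batchacct.westus2.batch.azure.com",
        pool_name="pool1",
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="AzureStorage1",
        ),
        credential=cred,
    )
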
class AzureBlobDataset(Dataset):
@@ -2691,6 +2748,8 @@ class AzureBlobFsLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2712,6 +2771,7 @@ class AzureBlobFsLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2729,6 +2789,7 @@ def __init__(
tenant: Optional[object] = None,
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBlobFsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -2740,6 +2801,7 @@ def __init__(
self.tenant = tenant
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureBlobFsLocation(DatasetLocation):
@@ -2802,6 +2864,9 @@ class AzureBlobFsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2839,6 +2904,7 @@ class AzureBlobFsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2855,6 +2921,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -2866,7 +2933,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -2904,8 +2971,14 @@ class AzureBlobFsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specifies the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -2920,7 +2993,9 @@ class AzureBlobFsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -2932,12 +3007,15 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
+ metadata: Optional[List["MetadataItem"]] = None,
**kwargs
):
- super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSSink' # type: str
self.copy_behavior = copy_behavior
+ self.metadata = metadata
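
Alongside the metrics flag, the blob-style sinks gain a typed `metadata` list; each entry is a `MetadataItem`, assumed here to carry the `name`/`value` pair its REST payload uses::

    from data_factory_management_client.models import AzureBlobFsSink, MetadataItem

    sink = AzureBlobFsSink(
        copy_behavior="PreserveHierarchy",
        metadata=[
            MetadataItem(name="source", value="adf-copy"),  # written onto sink files
        ],
    )
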
class AzureBlobFsSource(CopySource):
@@ -2959,6 +3037,9 @@ class AzureBlobFsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -2980,6 +3061,7 @@ class AzureBlobFsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -2992,12 +3074,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
treat_empty_as_null: Optional[object] = None,
skip_header_line_count: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSSource' # type: str
self.treat_empty_as_null = treat_empty_as_null
self.skip_header_line_count = skip_header_line_count
@@ -3020,6 +3103,9 @@ class StoreWriteSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -3032,6 +3118,7 @@ class StoreWriteSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -3044,6 +3131,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
@@ -3051,6 +3139,7 @@ def __init__(
self.additional_properties = additional_properties
self.type = 'StoreWriteSettings' # type: str
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
self.copy_behavior = copy_behavior
@@ -3067,6 +3156,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size (MB) when writing data to blob. Type: integer
@@ -3082,6 +3174,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3091,11 +3184,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
block_size_in_mb: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureBlobFSWriteSettings' # type: str
self.block_size_in_mb = block_size_in_mb
@@ -3153,6 +3247,8 @@ class AzureBlobStorageLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: str
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3177,6 +3273,7 @@ class AzureBlobStorageLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3198,6 +3295,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
account_kind: Optional[str] = None,
encrypted_credential: Optional[str] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -3213,6 +3311,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.account_kind = account_kind
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureBlobStorageLocation(DatasetLocation):
@@ -3275,6 +3374,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -3315,6 +3417,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -3332,6 +3435,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -3344,7 +3448,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -3371,6 +3475,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size (MB) when writing data to blob. Type: integer
@@ -3386,6 +3493,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3395,11 +3503,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
block_size_in_mb: Optional[object] = None,
**kwargs
):
- super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureBlobStorageWriteSettings' # type: str
self.block_size_in_mb = block_size_in_mb
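
Write settings mirror the read side: the flag is declared once on `StoreWriteSettings` and forwarded by every subclass, next to type-specific knobs such as `block_size_in_mb`. Sketch::

    from data_factory_management_client.models import AzureBlobStorageWriteSettings

    settings = AzureBlobStorageWriteSettings(
        copy_behavior="FlattenHierarchy",
        block_size_in_mb=8,             # wire key blockSizeInMB
        disable_metrics_collection=True,
    )
    assert settings.serialize()["blockSizeInMB"] == 8
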
@@ -3741,6 +3850,9 @@ class AzureDatabricksDeltaLakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -3761,6 +3873,7 @@ class AzureDatabricksDeltaLakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'},
}
@@ -3774,11 +3887,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None,
**kwargs
):
- super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDatabricksDeltaLakeSink' # type: str
self.pre_copy_script = pre_copy_script
self.import_settings = import_settings
@@ -3803,6 +3917,9 @@ class AzureDatabricksDeltaLakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Azure Databricks Delta Lake SQL query. Type: string (or Expression with
resultType string).
:type query: object
@@ -3821,6 +3938,7 @@ class AzureDatabricksDeltaLakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'},
}
@@ -3832,11 +3950,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None,
**kwargs
):
- super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDatabricksDeltaLakeSource' # type: str
self.query = query
self.export_settings = export_settings
@@ -3925,6 +4044,8 @@ class AzureDatabricksLinkedService(LinkedService):
:param policy_id: The policy id for limiting the ability to configure clusters based on a user
defined set of rules. Type: string (or Expression with resultType string).
:type policy_id: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3957,6 +4078,7 @@ class AzureDatabricksLinkedService(LinkedService):
'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3985,6 +4107,7 @@ def __init__(
new_cluster_enable_elastic_disk: Optional[object] = None,
encrypted_credential: Optional[object] = None,
policy_id: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4007,6 +4130,7 @@ def __init__(
self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk
self.encrypted_credential = encrypted_credential
self.policy_id = policy_id
+ self.credential = credential
class ExecutionActivity(Activity):
@@ -4177,6 +4301,8 @@ class AzureDataExplorerLinkedService(LinkedService):
:param tenant: The name or ID of the tenant to which the service principal belongs. Type:
string (or Expression with resultType string).
:type tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4197,6 +4323,7 @@ class AzureDataExplorerLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4212,6 +4339,7 @@ def __init__(
service_principal_id: Optional[object] = None,
service_principal_key: Optional["SecretBase"] = None,
tenant: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4221,6 +4349,7 @@ def __init__(
self.service_principal_key = service_principal_key
self.database = database
self.tenant = tenant
+ self.credential = credential
class AzureDataExplorerSink(CopySink):
@@ -4248,6 +4377,9 @@ class AzureDataExplorerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param ingestion_mapping_name: The name of a pre-created CSV mapping defined on the
target Kusto table. Type: string.
:type ingestion_mapping_name: object
@@ -4271,6 +4403,7 @@ class AzureDataExplorerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
@@ -4285,12 +4418,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ingestion_mapping_name: Optional[object] = None,
ingestion_mapping_as_json: Optional[object] = None,
flush_immediately: Optional[object] = None,
**kwargs
):
- super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataExplorerSink' # type: str
self.ingestion_mapping_name = ingestion_mapping_name
self.ingestion_mapping_as_json = ingestion_mapping_as_json
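
For the Kusto sink the new flag sits beside the existing ingestion-mapping options; typically either the pre-created mapping name or the inline JSON mapping is supplied, not both. Sketch (mapping name is hypothetical)::

    from data_factory_management_client.models import AzureDataExplorerSink

    sink = AzureDataExplorerSink(
        ingestion_mapping_name="OrdersCsvMapping",  # defined on the Kusto table
        flush_immediately=True,
        disable_metrics_collection=False,
    )
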
@@ -4316,6 +4450,9 @@ class AzureDataExplorerSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type:
string (or Expression with resultType string).
:type query: object
@@ -4326,8 +4463,8 @@ class AzureDataExplorerSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -4341,10 +4478,11 @@ class AzureDataExplorerSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'no_truncation': {'key': 'noTruncation', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -4355,12 +4493,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
no_truncation: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataExplorerSource' # type: str
self.query = query
self.no_truncation = no_truncation
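
On the source side, `query_timeout` follows the timespan pattern quoted above (an optional day count, then hh:mm:ss), so a ten-minute cap on a KQL query looks like this::

    from data_factory_management_client.models import AzureDataExplorerSource

    source = AzureDataExplorerSource(
        query="Orders | where ingestion_time() > ago(1d)",  # required KQL text
        query_timeout="00:10:00",  # matches ((\d+).)?(\d\d):(mm):(ss)
        no_truncation=True,
    )
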
@@ -4668,6 +4807,8 @@ class AzureDataLakeStoreLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4691,6 +4832,7 @@ class AzureDataLakeStoreLinkedService(LinkedService):
'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4710,6 +4852,7 @@ def __init__(
subscription_id: Optional[object] = None,
resource_group_name: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4723,6 +4866,7 @@ def __init__(
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureDataLakeStoreLocation(DatasetLocation):
@@ -4779,6 +4923,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4824,6 +4971,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4842,6 +4990,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -4855,7 +5004,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -4895,6 +5044,9 @@ class AzureDataLakeStoreSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param enable_adls_single_file_parallel: Single File Parallel.
@@ -4913,6 +5065,7 @@ class AzureDataLakeStoreSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
}
@@ -4926,11 +5079,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
enable_adls_single_file_parallel: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreSink' # type: str
self.copy_behavior = copy_behavior
self.enable_adls_single_file_parallel = enable_adls_single_file_parallel
@@ -4955,6 +5109,9 @@ class AzureDataLakeStoreSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4970,6 +5127,7 @@ class AzureDataLakeStoreSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
}
@@ -4980,10 +5138,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreSource' # type: str
self.recursive = recursive
@@ -5001,6 +5160,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param expiry_date_time: Specifies the expiry time of the written files. The time is applied to
@@ -5017,6 +5179,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'},
}
@@ -5026,11 +5189,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
expiry_date_time: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureDataLakeStoreWriteSettings' # type: str
self.expiry_date_time = expiry_date_time
@@ -5193,6 +5357,9 @@ class AzureFileStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -5233,6 +5400,7 @@ class AzureFileStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -5250,6 +5418,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -5262,7 +5431,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureFileStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -5289,6 +5458,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -5301,6 +5473,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -5309,10 +5482,11 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureFileStorageWriteSettings' # type: str
@@ -5426,6 +5600,13 @@ class AzureFunctionLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
+ :param resource_id: Allowed token audiences for the Azure Function.
+ :type resource_id: object
+ :param authentication: Type of authentication used to connect to AzureFunction (required when
+ specifying MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -5443,6 +5624,9 @@ class AzureFunctionLinkedService(LinkedService):
'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -5456,6 +5640,9 @@ def __init__(
annotations: Optional[List[object]] = None,
function_key: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
+ resource_id: Optional[object] = None,
+ authentication: Optional[object] = None,
**kwargs
):
super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -5463,6 +5650,9 @@ def __init__(
self.function_app_url = function_app_url
self.function_key = function_key
self.encrypted_credential = encrypted_credential
+ self.credential = credential
+ self.resource_id = resource_id
+ self.authentication = authentication
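The three new AzureFunctionLinkedService properties work together to enable managed identity auth. A hedged usage sketch, assuming the models are importable from the data_factory_management_client package the docstrings reference (URL and credential name are illustrative):

from data_factory_management_client.models import (
    AzureFunctionLinkedService,
    CredentialReference,
)

linked_service = AzureFunctionLinkedService(
    function_app_url='https://contoso.azurewebsites.net',
    authentication='MSI',  # opt in to managed identity instead of a function key
    resource_id='https://management.azure.com/',  # token audience requested for the function
    credential=CredentialReference(reference_name='myUserAssignedCredential'),
)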
class AzureKeyVaultLinkedService(LinkedService):
@@ -5486,6 +5676,8 @@ class AzureKeyVaultLinkedService(LinkedService):
:param base_url: Required. The base URL of the Azure Key Vault. e.g.
https://myakv.vault.azure.net Type: string (or Expression with resultType string).
:type base_url: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -5501,6 +5693,7 @@ class AzureKeyVaultLinkedService(LinkedService):
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -5512,11 +5705,13 @@ def __init__(
description: Optional[str] = None,
parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
annotations: Optional[List[object]] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.type = 'AzureKeyVault' # type: str
self.base_url = base_url
+ self.credential = credential
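The same CredentialReference hook lands on several linked services further down (Azure SQL Database, SQL DW, SQL MI); the Key Vault variant is the smallest, so a sketch under the same import assumption:

from data_factory_management_client.models import (
    AzureKeyVaultLinkedService,
    CredentialReference,
)

key_vault = AzureKeyVaultLinkedService(
    base_url='https://myakv.vault.azure.net',  # required; see the docstring above
    credential=CredentialReference(reference_name='myUserAssignedCredential'),
)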
class SecretBase(msrest.serialization.Model):
@@ -5680,12 +5875,15 @@ class AzureMariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -5701,8 +5899,9 @@ class AzureMariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -5713,12 +5912,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureMariaDBSource' # type: str
self.query = query
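Relaxing additional_columns from [AdditionalColumns] to a plain object is what lets the property carry either a literal column list or an ADF expression that resolves to one at run time. A sketch of both shapes (values are illustrative; import path assumed as above):

from data_factory_management_client.models import AzureMariaDbSource

# Literal list of name/value pairs, the shape the typed AdditionalColumns list used to enforce:
literal_columns = [{'name': 'source_system', 'value': 'mariadb'}]

# Or a single expression object that evaluates to the array at run time:
expression_columns = {'value': '@pipeline().parameters.extraColumns', 'type': 'Expression'}

source = AzureMariaDbSource(additional_columns=literal_columns)
# source = AzureMariaDbSource(additional_columns=expression_columns)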
@@ -6024,6 +6224,9 @@ class AzureMlLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param authentication: Type of authentication used to connect to AzureML (required when
+ specifying MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -6046,6 +6249,7 @@ class AzureMlLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -6063,6 +6267,7 @@ def __init__(
service_principal_key: Optional["SecretBase"] = None,
tenant: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ authentication: Optional[object] = None,
**kwargs
):
super(AzureMlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -6074,6 +6279,7 @@ def __init__(
self.service_principal_key = service_principal_key
self.tenant = tenant
self.encrypted_credential = encrypted_credential
+ self.authentication = authentication
class AzureMlServiceLinkedService(LinkedService):
@@ -6375,6 +6581,9 @@ class AzureMySqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -6392,6 +6601,7 @@ class AzureMySqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -6404,10 +6614,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureMySqlSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -6431,12 +6642,15 @@ class AzureMySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -6451,8 +6665,9 @@ class AzureMySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -6463,12 +6678,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureMySqlSource' # type: str
self.query = query
@@ -6638,6 +6854,9 @@ class AzurePostgreSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -6655,6 +6874,7 @@ class AzurePostgreSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -6667,10 +6887,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzurePostgreSqlSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -6694,12 +6915,15 @@ class AzurePostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -6715,8 +6939,9 @@ class AzurePostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -6727,12 +6952,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzurePostgreSqlSource' # type: str
self.query = query
@@ -6843,6 +7069,9 @@ class AzureQueueSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -6857,6 +7086,7 @@ class AzureQueueSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
def __init__(
@@ -6868,9 +7098,10 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
- super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureQueueSink' # type: str
@@ -6969,6 +7200,9 @@ class AzureSearchIndexSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specify the write behavior when upserting documents into Azure Search
Index. Possible values include: "Merge", "Upload".
:type write_behavior: str or
@@ -6987,6 +7221,7 @@ class AzureSearchIndexSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
}
@@ -6999,10 +7234,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None,
**kwargs
):
- super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureSearchIndexSink' # type: str
self.write_behavior = write_behavior
@@ -7116,6 +7352,8 @@ class AzureSqlDatabaseLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7138,6 +7376,7 @@ class AzureSqlDatabaseLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7156,6 +7395,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7168,6 +7408,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
self.always_encrypted_settings = always_encrypted_settings
+ self.credential = credential
class AzureSqlDwLinkedService(LinkedService):
@@ -7210,6 +7451,8 @@ class AzureSqlDwLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7231,6 +7474,7 @@ class AzureSqlDwLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7248,6 +7492,7 @@ def __init__(
tenant: Optional[object] = None,
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7259,6 +7504,7 @@ def __init__(
self.tenant = tenant
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureSqlDwTableDataset(Dataset):
@@ -7385,6 +7631,8 @@ class AzureSqlMiLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7407,6 +7655,7 @@ class AzureSqlMiLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7425,6 +7674,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7437,6 +7687,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
self.always_encrypted_settings = always_encrypted_settings
+ self.credential = credential
class AzureSqlMiTableDataset(Dataset):
@@ -7545,6 +7796,9 @@ class AzureSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -7563,6 +7817,14 @@ class AzureSqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -7577,12 +7839,16 @@ class AzureSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -7594,15 +7860,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureSqlSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -7610,6 +7880,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
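AzureSqlSink picks up a small upsert surface in this diff: write_behavior selects the mode, upsert_settings carries the key columns, and sql_writer_use_table_lock toggles table locking during bulk copy. A sketch under the same import assumption (SqlUpsertSettings is defined elsewhere in this file; field values are illustrative):

from data_factory_management_client.models import AzureSqlSink, SqlUpsertSettings

sink = AzureSqlSink(
    write_behavior='upsert',  # serialized as an opaque object, hence the plain string
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,      # stage rows in a temp table before merging
        keys=['CustomerId'],   # key columns that decide update-vs-insert
    ),
    sql_writer_use_table_lock=False,
)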
class AzureSqlSource(TabularSource):
@@ -7631,12 +7904,15 @@ class AzureSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -7666,8 +7942,9 @@ class AzureSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -7683,8 +7960,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
sql_reader_query: Optional[object] = None,
sql_reader_stored_procedure_name: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
@@ -7693,7 +7971,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureSqlSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -7955,6 +8233,9 @@ class AzureTableSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param azure_table_default_partition_key_value: Azure Table default partition key value. Type:
string (or Expression with resultType string).
:type azure_table_default_partition_key_value: object
@@ -7981,6 +8262,7 @@ class AzureTableSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
@@ -7996,13 +8278,14 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
azure_table_default_partition_key_value: Optional[object] = None,
azure_table_partition_key_name: Optional[object] = None,
azure_table_row_key_name: Optional[object] = None,
azure_table_insert_type: Optional[object] = None,
**kwargs
):
- super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureTableSink' # type: str
self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
self.azure_table_partition_key_name = azure_table_partition_key_name
@@ -8029,12 +8312,15 @@ class AzureTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param azure_table_source_query: Azure Table source query. Type: string (or Expression with
resultType string).
:type azure_table_source_query: object
@@ -8053,8 +8339,9 @@ class AzureTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
}
@@ -8066,13 +8353,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
azure_table_source_query: Optional[object] = None,
azure_table_source_ignore_table_not_found: Optional[object] = None,
**kwargs
):
- super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureTableSource' # type: str
self.azure_table_source_query = azure_table_source_query
self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
@@ -8327,6 +8615,9 @@ class BinarySink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
"""
@@ -8343,6 +8634,7 @@ class BinarySink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
}
@@ -8355,10 +8647,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
**kwargs
):
- super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BinarySink' # type: str
self.store_settings = store_settings
@@ -8382,6 +8675,9 @@ class BinarySource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Binary format settings.
@@ -8398,6 +8694,7 @@ class BinarySource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'},
}
@@ -8409,11 +8706,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["BinaryReadSettings"] = None,
**kwargs
):
- super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BinarySource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
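Since BinarySource threads disable_metrics_collection too, a sketch composing it with the read settings from earlier in this diff (same assumed import path; the wildcard is illustrative):

from data_factory_management_client.models import (
    AzureDataLakeStoreReadSettings,
    BinaryReadSettings,
    BinarySource,
)

source = BinarySource(
    store_settings=AzureDataLakeStoreReadSettings(
        recursive=True,
        wildcard_file_name='*.bin',
        disable_metrics_collection=True,  # the flag added across this diff
    ),
    format_settings=BinaryReadSettings(),
)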
@@ -8641,6 +8939,9 @@ class BlobSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression
with resultType boolean).
:type blob_writer_overwrite_files: object
@@ -8652,6 +8953,9 @@ class BlobSink(CopySink):
:type blob_writer_add_header: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -8666,10 +8970,12 @@ class BlobSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -8681,18 +8987,21 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
blob_writer_overwrite_files: Optional[object] = None,
blob_writer_date_time_format: Optional[object] = None,
blob_writer_add_header: Optional[object] = None,
copy_behavior: Optional[object] = None,
+ metadata: Optional[List["MetadataItem"]] = None,
**kwargs
):
- super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BlobSink' # type: str
self.blob_writer_overwrite_files = blob_writer_overwrite_files
self.blob_writer_date_time_format = blob_writer_date_time_format
self.blob_writer_add_header = blob_writer_add_header
self.copy_behavior = copy_behavior
+ self.metadata = metadata
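Unlike the loose object-typed properties above, BlobSink's new metadata property is a typed [MetadataItem] list. A sketch under the same import assumption (names and values are illustrative):

from data_factory_management_client.models import BlobSink, MetadataItem

sink = BlobSink(
    copy_behavior='PreserveHierarchy',
    metadata=[
        MetadataItem(name='sourceSystem', value='datafactory'),
        MetadataItem(name='ingestedBy', value='copy-activity'),
    ],
)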
class BlobSource(CopySource):
@@ -8714,6 +9023,9 @@ class BlobSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -8735,6 +9047,7 @@ class BlobSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -8747,12 +9060,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
treat_empty_as_null: Optional[object] = None,
skip_header_line_count: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BlobSource' # type: str
self.treat_empty_as_null = treat_empty_as_null
self.skip_header_line_count = skip_header_line_count
@@ -8931,12 +9245,15 @@ class CassandraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language
(CQL) command. Type: string (or Expression with resultType string).
:type query: object
@@ -8960,8 +9277,9 @@ class CassandraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'consistency_level': {'key': 'consistencyLevel', 'type': 'str'},
}
@@ -8973,13 +9291,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None,
**kwargs
):
- super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'CassandraSource' # type: str
self.query = query
self.consistency_level = consistency_level
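The switch from Optional[List["AdditionalColumns"]] to Optional[object] is what lets additional_columns carry either a literal list or a whole ADF expression. A hedged sketch of both forms, with the import path taken from these docstrings and the expression shape following the standard ADF expression object:

    from data_factory_management_client.models import CassandraSource

    # Literal form: a plain list of name/value pairs.
    src = CassandraSource(
        query="SELECT * FROM ks.events",
        additional_columns=[{"name": "origin", "value": "cassandra"}],
    )

    # Expression form: the entire value is resolved at run time, which a
    # typed list could not express.
    src_dynamic = CassandraSource(
        query="SELECT * FROM ks.events",
        additional_columns={"value": "@pipeline().parameters.extraColumns",
                            "type": "Expression"},
    )
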
@@ -9325,8 +9644,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param deployment_type: Required. The deployment type of the Common Data Service for Apps
instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common
Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType
- string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Common Data Service for Apps server. The
property is required for on-prem and not allowed for online. Type: string (or Expression with
resultType string).
@@ -9347,10 +9666,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Common Data Service
for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario.
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Common Data Service for Apps instance. Type: string
(or Expression with resultType string).
:type username: object
@@ -9361,10 +9678,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -9390,16 +9705,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
@@ -9407,8 +9722,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -9421,7 +9736,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[object] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
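Loosening deployment_type and authentication_type from enums to object keeps the documented literals working while also admitting expression objects. A minimal sketch under that assumption (import path as in the docstrings):

    from data_factory_management_client.models import (
        CommonDataServiceForAppsLinkedService,
    )

    ls = CommonDataServiceForAppsLinkedService(
        deployment_type="Online",  # documented literal still accepted
        authentication_type={      # expression object, newly representable
            "value": "@linkedService().authType",
            "type": "Expression",
        },
        service_uri="https://contoso.crm.dynamics.com",
    )
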
@@ -9467,6 +9782,9 @@ class CommonDataServiceForAppsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -9492,6 +9810,7 @@ class CommonDataServiceForAppsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -9507,11 +9826,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CommonDataServiceForAppsSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -9537,12 +9857,15 @@ class CommonDataServiceForAppsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Common Data
Service for Apps (online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -9555,8 +9878,9 @@ class CommonDataServiceForAppsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -9566,11 +9890,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CommonDataServiceForAppsSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -9836,12 +10161,15 @@ class ConcurSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -9857,8 +10185,9 @@ class ConcurSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -9869,12 +10198,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ConcurSource' # type: str
self.query = query
@@ -10458,6 +10788,9 @@ class CosmosDbMongoDbApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specifies whether documents with the same key should be overwritten
(upsert) rather than raising an exception (insert). The default value is "insert". Type:
string (or Expression with resultType string).
@@ -10476,6 +10809,7 @@ class CosmosDbMongoDbApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -10488,10 +10822,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbMongoDbApiSink' # type: str
self.write_behavior = write_behavior
@@ -10515,6 +10850,9 @@ class CosmosDbMongoDbApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -10530,8 +10868,8 @@ class CosmosDbMongoDbApiSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -10544,11 +10882,12 @@ class CosmosDbMongoDbApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -10558,14 +10897,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbMongoDbApiSource' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
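A sketch of the MongoDB API source with the new flag next to the existing filter and cursor settings; MongoDbCursorMethodsProperties is defined elsewhere in this file, and its limit parameter is an assumption here, not shown in this diff:

    from data_factory_management_client.models import (
        CosmosDbMongoDbApiSource,
        MongoDbCursorMethodsProperties,
    )

    src = CosmosDbMongoDbApiSource(
        # Standard MongoDB selection filter; omit (or pass {}) for all documents.
        filter='{"status": "active"}',
        cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
        batch_size=100,
        disable_metrics_collection=True,
    )
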
@@ -10669,6 +11009,9 @@ class CosmosDbSqlApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or
Expression with resultType string). Allowed values: insert and upsert.
:type write_behavior: object
@@ -10686,6 +11029,7 @@ class CosmosDbSqlApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -10698,10 +11042,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbSqlApiSink' # type: str
self.write_behavior = write_behavior
@@ -10725,6 +11070,9 @@ class CosmosDbSqlApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: SQL API query. Type: string (or Expression with resultType string).
:type query: object
:param page_size: Page size of the result. Type: integer (or Expression with resultType
@@ -10737,8 +11085,8 @@ class CosmosDbSqlApiSource(CopySource):
Expression with resultType boolean).
:type detect_datetime: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -10751,11 +11099,12 @@ class CosmosDbSqlApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'page_size': {'key': 'pageSize', 'type': 'object'},
'preferred_regions': {'key': 'preferredRegions', 'type': 'object'},
'detect_datetime': {'key': 'detectDatetime', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -10765,14 +11114,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
page_size: Optional[object] = None,
preferred_regions: Optional[object] = None,
detect_datetime: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbSqlApiSource' # type: str
self.query = query
self.page_size = page_size
@@ -10865,12 +11215,15 @@ class CouchbaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -10886,8 +11239,9 @@ class CouchbaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -10898,12 +11252,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'CouchbaseSource' # type: str
self.query = query
@@ -11107,6 +11462,181 @@ def __init__(
self.run_id = run_id
+class Credential(msrest.serialization.Model):
+ """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ }
+
+ _subtype_map = {
+ 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'}
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ **kwargs
+ ):
+ super(Credential, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = 'Credential' # type: str
+ self.description = description
+ self.annotations = annotations
+
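The _subtype_map above is what drives polymorphic deserialization: msrest reads the wire value of type and instantiates the matching subclass. A minimal sketch (Model.deserialize is the stock msrest entry point; ManagedIdentityCredential is one of the subclasses named in the map, defined elsewhere in this file):

    from data_factory_management_client.models import Credential

    payload = {
        "type": "ManagedIdentity",
        "description": "factory system-assigned identity",
    }
    # Dispatches through _subtype_map and returns a ManagedIdentityCredential;
    # constructing Credential directly leaves type='Credential', which only
    # exists as an abstract discriminator value.
    cred = Credential.deserialize(payload)
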
+
+class CredentialReference(msrest.serialization.Model):
+ """Credential reference type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :ivar type: Required. Credential reference type. Default value: "CredentialReference".
+ :vartype type: str
+ :param reference_name: Required. Reference credential name.
+ :type reference_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True, 'constant': True},
+ 'reference_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_name': {'key': 'referenceName', 'type': 'str'},
+ }
+
+ type = "CredentialReference"
+
+ def __init__(
+ self,
+ *,
+ reference_name: str,
+ additional_properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(CredentialReference, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.reference_name = reference_name
+
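Because type is declared required and constant and pinned at class level, callers only ever supply reference_name; a quick sketch:

    from data_factory_management_client.models import CredentialReference

    ref = CredentialReference(reference_name="myUserAssignedIdentityCredential")
    assert ref.type == "CredentialReference"  # class-level constant, not settable
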
+
+class SubResource(msrest.serialization.Model):
+ """Azure Data Factory nested resource, which belongs to a factory.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SubResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.etag = None
+
+
+class CredentialResource(SubResource):
+ """Credential resource type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Required. Properties of credentials.
+ :type properties: ~data_factory_management_client.models.Credential
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'Credential'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: "Credential",
+ **kwargs
+ ):
+ super(CredentialResource, self).__init__(**kwargs)
+ self.properties = properties
+
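CredentialResource wraps a concrete Credential in the SubResource envelope; id, name, type, and etag are readonly and stay None until the service fills them in. A sketch, assuming ManagedIdentityCredential (one of the subclasses listed in Credential._subtype_map) accepts the inherited description keyword:

    from data_factory_management_client.models import (
        CredentialResource,
        ManagedIdentityCredential,
    )

    resource = CredentialResource(
        properties=ManagedIdentityCredential(
            description="UAMI used to reach the customer key vault",
        ),
    )
    assert resource.id is None  # populated from the service response, not locally
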
+
class CustomActivity(ExecutionActivity):
"""Custom activity type.
@@ -12092,46 +12622,6 @@ def __init__(
self.dataset_parameters = dataset_parameters
-class SubResource(msrest.serialization.Model):
- """Azure Data Factory nested resource, which belongs to a factory.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(SubResource, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
- self.etag = None
-
-
class DataFlowResource(SubResource):
"""Data flow resource type.
@@ -12468,8 +12958,9 @@ class DatasetCompression(msrest.serialization.Model):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -12478,7 +12969,7 @@ class DatasetCompression(msrest.serialization.Model):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
_subtype_map = {
@@ -12504,8 +12995,9 @@ class DatasetBZip2Compression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -12514,7 +13006,7 @@ class DatasetBZip2Compression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
def __init__(
@@ -12592,10 +13084,11 @@ class DatasetDeflateCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The Deflate compression level.
+ :type level: object
"""
_validation = {
@@ -12604,15 +13097,15 @@ class DatasetDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -12649,10 +13142,11 @@ class DatasetGZipCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The GZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The GZip compression level.
+ :type level: object
"""
_validation = {
@@ -12661,15 +13155,15 @@ class DatasetGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -12834,8 +13328,9 @@ class DatasetTarCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
"""
_validation = {
@@ -12844,7 +13339,7 @@ class DatasetTarCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
}
def __init__(
@@ -12865,10 +13360,11 @@ class DatasetTarGZipCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The TarGZip compression level.
+ :type level: object
"""
_validation = {
@@ -12877,15 +13373,15 @@ class DatasetTarGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -12901,10 +13397,11 @@ class DatasetZipDeflateCompression(DatasetCompression):
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
- :param type: Required. Type of dataset compression.Constant filled by server.
- :type type: str
- :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param type: Required. Type of dataset compression. Type: string (or Expression with resultType
+ string). Constant filled by server.
+ :type type: object
+ :param level: The ZipDeflate compression level.
+ :type level: object
"""
_validation = {
@@ -12913,15 +13410,15 @@ class DatasetZipDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'object'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
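With level widened from the DatasetCompressionLevel enum to object, a compression setting can now be parameterized; a sketch using the ZipDeflate variant defined just above (import path as in the docstrings):

    from data_factory_management_client.models import DatasetZipDeflateCompression

    static = DatasetZipDeflateCompression(level="Optimal")  # former enum literal
    dynamic = DatasetZipDeflateCompression(                 # run-time expression
        level={"value": "@dataset().compressionLevel", "type": "Expression"},
    )
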
@@ -13051,12 +13548,15 @@ class Db2Source(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -13071,8 +13571,9 @@ class Db2Source(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -13083,12 +13584,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'Db2Source' # type: str
self.query = query
@@ -13326,12 +13828,11 @@ class DelimitedTextDataset(Dataset):
https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
resultType string).
:type encoding_name: object
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
- :param compression_level: The data compression method used for DelimitedText. Possible values
- include: "Optimal", "Fastest".
- :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param compression_codec: The data compression codec. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
+ :param compression_level: The data compression method used for DelimitedText.
+ :type compression_level: object
:param quote_char: The quote character. Type: string (or Expression with resultType string).
:type quote_char: object
:param escape_char: The escape character. Type: string (or Expression with resultType string).
@@ -13363,8 +13864,8 @@ class DelimitedTextDataset(Dataset):
'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
- 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
@@ -13386,8 +13887,8 @@ def __init__(
column_delimiter: Optional[object] = None,
row_delimiter: Optional[object] = None,
encoding_name: Optional[object] = None,
- compression_codec: Optional[Union[str, "CompressionCodec"]] = None,
- compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ compression_codec: Optional[object] = None,
+ compression_level: Optional[object] = None,
quote_char: Optional[object] = None,
escape_char: Optional[object] = None,
first_row_as_header: Optional[object] = None,
@@ -13475,6 +13976,9 @@ class DelimitedTextSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: DelimitedText format settings.
@@ -13493,6 +13997,7 @@ class DelimitedTextSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
}
@@ -13506,11 +14011,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["DelimitedTextWriteSettings"] = None,
**kwargs
):
- super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DelimitedTextSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
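A sketch of the sink with the new flag alongside its format settings; the file_extension parameter of DelimitedTextWriteSettings is an assumption based on the service schema, since that model is not shown in this hunk:

    from data_factory_management_client.models import (
        DelimitedTextSink,
        DelimitedTextWriteSettings,
    )

    sink = DelimitedTextSink(
        format_settings=DelimitedTextWriteSettings(file_extension=".csv"),
        disable_metrics_collection=True,
    )
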
@@ -13535,13 +14041,16 @@ class DelimitedTextSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: DelimitedText format settings.
:type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -13554,9 +14063,10 @@ class DelimitedTextSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -13566,12 +14076,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["DelimitedTextReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DelimitedTextSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -13804,6 +14315,9 @@ class DocumentDbCollectionSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or
Expression with resultType string).
:type nesting_separator: object
@@ -13824,6 +14338,7 @@ class DocumentDbCollectionSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -13837,11 +14352,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
nesting_separator: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DocumentDbCollectionSink' # type: str
self.nesting_separator = nesting_separator
self.write_behavior = write_behavior
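On the sink side the same `disable_metrics_collection` flag threads through `CopySink.__init__` via the `super()` call above. A hedged sketch; the batch size, separator, and write-behavior values are illustrative:

```python
# Editor's sketch, not part of the diff. Import path assumed.
from azext_datafactory.vendored_sdks.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    write_batch_size=100,
    disable_metrics_collection=False,  # explicit default; omitting it behaves the same
    nesting_separator=".",
    write_behavior="upsert",           # writeBehavior is a plain object, not an enum
)
assert sink.type == 'DocumentDbCollectionSink'
```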
@@ -13866,6 +14382,9 @@ class DocumentDbCollectionSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Documents query. Type: string (or Expression with resultType string).
:type query: object
:param nesting_separator: Nested properties separator. Type: string (or Expression with
@@ -13875,8 +14394,8 @@ class DocumentDbCollectionSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -13889,10 +14408,11 @@ class DocumentDbCollectionSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -13902,13 +14422,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
nesting_separator: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DocumentDbCollectionSource' # type: str
self.query = query
self.nesting_separator = nesting_separator
@@ -14000,12 +14521,15 @@ class DrillSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -14021,8 +14545,9 @@ class DrillSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -14033,12 +14558,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'DrillSource' # type: str
self.query = query
@@ -14361,12 +14887,15 @@ class DynamicsAxSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -14387,8 +14916,9 @@ class DynamicsAxSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -14400,13 +14930,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'DynamicsAXSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -14501,9 +15032,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online'
for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type:
- string (or Expression with resultType string). Possible values include: "Online",
- "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string (or Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics CRM server. The property is
required for on-prem and not allowed for online. Type: string (or Expression with resultType
string).
@@ -14522,10 +15052,8 @@ class DynamicsCrmLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics CRM
server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario,
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics CRM instance. Type: string (or Expression
with resultType string).
:type username: object
@@ -14536,10 +15064,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -14565,16 +15091,16 @@ class DynamicsCrmLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
@@ -14582,8 +15108,8 @@ class DynamicsCrmLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -14596,7 +15122,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[object] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
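With `deployment_type` and `authentication_type` widened from `Union[str, Enum]` to plain `object`, callers can pass either the literal strings the old enums named or an ADF expression object. A sketch under that assumption; the URI and expression value are illustrative:

```python
# Editor's sketch, not part of the diff. Import path assumed.
from azext_datafactory.vendored_sdks.datafactory.models import DynamicsCrmLinkedService

# Literal strings still work exactly as before:
ls = DynamicsCrmLinkedService(
    deployment_type="Online",
    authentication_type="Office365",
    service_uri="https://contoso.crm.dynamics.com",
)

# ...and so does an expression, which the enum-typed signature used to reject:
ls_param = DynamicsCrmLinkedService(
    deployment_type={"type": "Expression", "value": "@linkedService().deployType"},
    authentication_type="AADServicePrincipal",
)
```

The same widening is applied to `DynamicsLinkedService` below, with the one wrinkle that its `service_principal_credential_type` lands on `str` rather than `object`.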
@@ -14642,6 +15168,9 @@ class DynamicsCrmSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -14667,6 +15196,7 @@ class DynamicsCrmSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -14682,11 +15212,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsCrmSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -14712,12 +15243,15 @@ class DynamicsCrmSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -14730,8 +15264,9 @@ class DynamicsCrmSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -14741,11 +15276,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsCrmSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -14840,8 +15376,8 @@ class DynamicsLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for
Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or
- Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics server. The property is required
for on-prem and not allowed for online. Type: string (or Expression with resultType string).
:type host_name: object
@@ -14859,9 +15395,8 @@ class DynamicsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics server.
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal'
for Server-To-Server authentication in online scenario. Type: string (or Expression with
- resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics instance. Type: string (or Expression with
resultType string).
:type username: object
@@ -14872,10 +15407,8 @@ class DynamicsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: str
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -14901,12 +15434,12 @@ class DynamicsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
@@ -14918,8 +15451,8 @@ class DynamicsLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -14932,7 +15465,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[str] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
@@ -14978,6 +15511,9 @@ class DynamicsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -15003,6 +15539,7 @@ class DynamicsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -15018,11 +15555,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -15048,12 +15586,15 @@ class DynamicsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -15066,8 +15607,9 @@ class DynamicsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -15077,11 +15619,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -15265,12 +15808,15 @@ class EloquaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -15286,8 +15832,9 @@ class EloquaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -15298,12 +15845,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'EloquaSource' # type: str
self.query = query
@@ -15448,9 +15996,12 @@ class ExcelDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the excel storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType
+ :param sheet_name: The sheet name of the Excel file. Type: string (or Expression with resultType
string).
:type sheet_name: object
+ :param sheet_index: The sheet index of the Excel file; the default value is 0. Type: integer (or
+ Expression with resultType integer).
+ :type sheet_index: object
:param range: The partial data of one sheet. Type: string (or Expression with resultType
string).
:type range: object
@@ -15481,6 +16032,7 @@ class ExcelDataset(Dataset):
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'},
+ 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'},
'range': {'key': 'typeProperties.range', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
@@ -15500,6 +16052,7 @@ def __init__(
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
sheet_name: Optional[object] = None,
+ sheet_index: Optional[object] = None,
range: Optional[object] = None,
first_row_as_header: Optional[object] = None,
compression: Optional["DatasetCompression"] = None,
@@ -15510,6 +16063,7 @@ def __init__(
self.type = 'Excel' # type: str
self.location = location
self.sheet_name = sheet_name
+ self.sheet_index = sheet_index
self.range = range
self.first_row_as_header = first_row_as_header
self.compression = compression
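A short sketch of the new `sheet_index` property, which serializes under `typeProperties.sheetIndex` per the attribute map above and, per the docstring, defaults to 0. The linked-service reference name is illustrative, and the `LinkedServiceReference` constructor shape is assumed from the surrounding generated code:

```python
# Editor's sketch, not part of the diff. Import path and reference shape assumed.
from azext_datafactory.vendored_sdks.datafactory.models import (
    ExcelDataset,
    LinkedServiceReference,
)

dataset = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name="ls_blob_storage"),
    sheet_index=2,            # new: zero-based index, an alternative to sheet_name
    first_row_as_header=True,
)
assert dataset.sheet_index == 2
```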
@@ -15535,11 +16089,14 @@ class ExcelSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Excel store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -15552,8 +16109,9 @@ class ExcelSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -15563,11 +16121,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ExcelSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
@@ -16262,6 +16821,10 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration):
:type last_commit_id: str
:param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com.
:type host_name: str
+ :param client_id: GitHub bring-your-own-app client ID.
+ :type client_id: str
+ :param client_secret: GitHub bring-your-own-app client secret information.
+ :type client_secret: ~data_factory_management_client.models.GitHubClientSecret
"""
_validation = {
@@ -16280,6 +16843,8 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration):
'root_folder': {'key': 'rootFolder', 'type': 'str'},
'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'GitHubClientSecret'},
}
def __init__(
@@ -16291,11 +16856,15 @@ def __init__(
root_folder: str,
last_commit_id: Optional[str] = None,
host_name: Optional[str] = None,
+ client_id: Optional[str] = None,
+ client_secret: Optional["GitHubClientSecret"] = None,
**kwargs
):
super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
self.type = 'FactoryGitHubConfiguration' # type: str
self.host_name = host_name
+ self.client_id = client_id
+ self.client_secret = client_secret
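Together with the `GitHubClientSecret` model introduced further down in this diff, these two fields let a factory's GitHub repo configuration carry bring-your-own-app credentials. A hedged sketch; every account name, client ID, and URL below is illustrative:

```python
# Editor's sketch, not part of the diff. Import path and all values assumed.
from azext_datafactory.vendored_sdks.datafactory.models import (
    FactoryGitHubConfiguration,
    GitHubClientSecret,
)

repo_config = FactoryGitHubConfiguration(
    account_name="contoso",
    repository_name="adf-pipelines",
    collaboration_branch="main",
    root_folder="/",
    host_name="https://github.contoso.com",  # GitHub Enterprise only; omit for github.com
    client_id="Iv1.0123456789abcdef",
    client_secret=GitHubClientSecret(
        byoa_secret_akv_url="https://myvault.vault.azure.net",
        byoa_secret_name="github-app-secret",
    ),
)
```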
class FactoryIdentity(msrest.serialization.Model):
@@ -16614,6 +17183,9 @@ class FileServerReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16654,6 +17226,7 @@ class FileServerReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16671,6 +17244,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -16683,7 +17257,7 @@ def __init__(
file_filter: Optional[object] = None,
**kwargs
):
- super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileServerReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -16710,6 +17284,9 @@ class FileServerWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -16722,6 +17299,7 @@ class FileServerWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -16730,10 +17308,11 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'FileServerWriteSettings' # type: str
@@ -16865,6 +17444,9 @@ class FileSystemSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -16881,6 +17463,7 @@ class FileSystemSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -16893,10 +17476,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileSystemSink' # type: str
self.copy_behavior = copy_behavior
@@ -16920,12 +17504,15 @@ class FileSystemSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -16938,8 +17525,9 @@ class FileSystemSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -16949,11 +17537,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileSystemSource' # type: str
self.recursive = recursive
self.additional_columns = additional_columns
@@ -17104,6 +17693,9 @@ class FtpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -17137,6 +17729,7 @@ class FtpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -17152,6 +17745,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -17162,7 +17756,7 @@ def __init__(
use_binary_transfer: Optional[bool] = None,
**kwargs
):
- super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FtpReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -17446,6 +18040,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
:type git_hub_access_code: str
:param git_hub_client_id: GitHub application client ID.
:type git_hub_client_id: str
+ :param git_hub_client_secret: GitHub bring-your-own-app client secret information.
+ :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret
:param git_hub_access_token_base_url: Required. GitHub access token base URL.
:type git_hub_access_token_base_url: str
"""
@@ -17458,6 +18054,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
_attribute_map = {
'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
+ 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'},
'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
}
@@ -17467,11 +18064,13 @@ def __init__(
git_hub_access_code: str,
git_hub_access_token_base_url: str,
git_hub_client_id: Optional[str] = None,
+ git_hub_client_secret: Optional["GitHubClientSecret"] = None,
**kwargs
):
super(GitHubAccessTokenRequest, self).__init__(**kwargs)
self.git_hub_access_code = git_hub_access_code
self.git_hub_client_id = git_hub_client_id
+ self.git_hub_client_secret = git_hub_client_secret
self.git_hub_access_token_base_url = git_hub_access_token_base_url
@@ -17496,6 +18095,32 @@ def __init__(
self.git_hub_access_token = git_hub_access_token
+class GitHubClientSecret(msrest.serialization.Model):
+ """Client secret information for factory's bring your own app repository configuration.
+
+ :param byoa_secret_akv_url: Bring your own app client secret AKV URL.
+ :type byoa_secret_akv_url: str
+ :param byoa_secret_name: Bring your own app client secret name in AKV.
+ :type byoa_secret_name: str
+ """
+
+ _attribute_map = {
+ 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'},
+ 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ byoa_secret_akv_url: Optional[str] = None,
+ byoa_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(GitHubClientSecret, self).__init__(**kwargs)
+ self.byoa_secret_akv_url = byoa_secret_akv_url
+ self.byoa_secret_name = byoa_secret_name
+
+
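The other call site for the new model is the OAuth code exchange: `GitHubAccessTokenRequest` gained a `git_hub_client_secret` field a few hunks up. A sketch with placeholder values; whether the token base URL is github.com's OAuth endpoint or a GitHub Enterprise host depends on the deployment:

```python
# Editor's sketch, not part of the diff. All values are placeholders.
from azext_datafactory.vendored_sdks.datafactory.models import (
    GitHubAccessTokenRequest,
    GitHubClientSecret,
)

request = GitHubAccessTokenRequest(
    git_hub_access_code="<oauth-code-returned-by-github>",
    git_hub_access_token_base_url="https://github.com/login/oauth/access_token",
    git_hub_client_id="Iv1.0123456789abcdef",
    git_hub_client_secret=GitHubClientSecret(
        byoa_secret_akv_url="https://myvault.vault.azure.net",
        byoa_secret_name="github-app-secret",
    ),
)
```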
class GlobalParameterSpecification(msrest.serialization.Model):
"""Definition of a single parameter for an entity.
@@ -17737,12 +18362,15 @@ class GoogleAdWordsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -17758,8 +18386,9 @@ class GoogleAdWordsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -17770,12 +18399,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GoogleAdWordsSource' # type: str
self.query = query
@@ -18004,12 +18634,15 @@ class GoogleBigQuerySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -18025,8 +18658,9 @@ class GoogleBigQuerySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18037,12 +18671,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GoogleBigQuerySource' # type: str
self.query = query
@@ -18187,6 +18822,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18227,6 +18865,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -18244,6 +18883,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -18256,7 +18896,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'GoogleCloudStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -18354,12 +18994,15 @@ class GreenplumSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -18375,8 +19018,9 @@ class GreenplumSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18387,12 +19031,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GreenplumSource' # type: str
self.query = query
@@ -18679,12 +19324,15 @@ class HBaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -18700,8 +19348,9 @@ class HBaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -18712,12 +19361,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HBaseSource' # type: str
self.query = query
@@ -18854,6 +19504,9 @@ class HdfsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18893,6 +19546,7 @@ class HdfsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -18910,6 +19564,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -18922,7 +19577,7 @@ def __init__(
delete_files_after_completion: Optional[object] = None,
**kwargs
):
- super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HdfsReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
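The same flag threads through every `StoreReadSettings` subclass. A hedged sketch of an HDFS read with wildcard matching, under the same import-path assumption as above:

from azext_datafactory.vendored_sdks.datafactory.models import HdfsReadSettings

read_settings = HdfsReadSettings(
    recursive=True,                   # walk the folder tree
    wildcard_file_name="*.csv",       # filter files by wildcard
    disable_metrics_collection=True,  # new opt-out; the service default is false
)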
@@ -18955,6 +19610,9 @@ class HdfsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18972,6 +19630,7 @@ class HdfsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
}
@@ -18983,11 +19642,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
distcp_settings: Optional["DistcpSettings"] = None,
**kwargs
):
- super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HdfsSource' # type: str
self.recursive = recursive
self.distcp_settings = distcp_settings
@@ -19407,6 +20067,8 @@ class HdInsightOnDemandLinkedService(LinkedService):
:param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was
specified, then this property is required. Type: string (or Expression with resultType string).
:type subnet_name: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -19460,6 +20122,7 @@ class HdInsightOnDemandLinkedService(LinkedService):
'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -19503,6 +20166,7 @@ def __init__(
script_actions: Optional[List["ScriptAction"]] = None,
virtual_network_id: Optional[object] = None,
subnet_name: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(HdInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -19540,6 +20204,7 @@ def __init__(
self.script_actions = script_actions
self.virtual_network_id = virtual_network_id
self.subnet_name = subnet_name
+ self.credential = credential
class HdInsightPigActivity(ExecutionActivity):
@@ -20109,12 +20774,15 @@ class HiveSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -20130,8 +20798,9 @@ class HiveSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -20142,12 +20811,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HiveSource' # type: str
self.query = query
@@ -20372,6 +21042,9 @@ class HttpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -20399,6 +21072,7 @@ class HttpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -20412,6 +21086,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
request_body: Optional[object] = None,
additional_headers: Optional[object] = None,
@@ -20420,7 +21095,7 @@ def __init__(
partition_root_path: Optional[object] = None,
**kwargs
):
- super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HttpReadSettings' # type: str
self.request_method = request_method
self.request_body = request_body
@@ -20496,6 +21171,9 @@ class HttpSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param http_request_timeout: Specifies the timeout for an HTTP client to get an HTTP response
from the HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout.
Type: string
(or Expression with resultType string), pattern:
@@ -20513,6 +21191,7 @@ class HttpSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -20523,10 +21202,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HttpSource' # type: str
self.http_request_timeout = http_request_timeout
@@ -20714,12 +21394,15 @@ class HubspotSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -20735,8 +21418,9 @@ class HubspotSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -20747,12 +21431,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HubspotSource' # type: str
self.query = query
@@ -21042,12 +21727,15 @@ class ImpalaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -21063,8 +21751,9 @@ class ImpalaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -21075,12 +21764,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ImpalaSource' # type: str
self.query = query
@@ -21196,6 +21886,9 @@ class InformixSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -21213,6 +21906,7 @@ class InformixSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -21225,10 +21919,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'InformixSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -21252,12 +21947,15 @@ class InformixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -21272,8 +21970,9 @@ class InformixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -21284,12 +21983,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'InformixSource' # type: str
self.query = query
@@ -21597,6 +22297,9 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
:param time_to_live: Time to live (in minutes) setting of the cluster which will execute the
data flow job.
:type time_to_live: int
+ :param cleanup: If set to false, the cluster will not be recycled and will be reused in the
+ next data flow activity run until its TTL (time to live) is reached. Default is true.
+ :type cleanup: bool
"""
_validation = {
@@ -21608,6 +22311,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
'compute_type': {'key': 'computeType', 'type': 'str'},
'core_count': {'key': 'coreCount', 'type': 'int'},
'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+ 'cleanup': {'key': 'cleanup', 'type': 'bool'},
}
def __init__(
@@ -21617,6 +22321,7 @@ def __init__(
compute_type: Optional[Union[str, "DataFlowComputeType"]] = None,
core_count: Optional[int] = None,
time_to_live: Optional[int] = None,
+ cleanup: Optional[bool] = None,
**kwargs
):
super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
@@ -21624,6 +22329,7 @@ def __init__(
self.compute_type = compute_type
self.core_count = core_count
self.time_to_live = time_to_live
+ self.cleanup = cleanup
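In other words, `cleanup=False` keeps the cluster warm for subsequent data flow runs until the TTL expires. A minimal sketch using only the parameters shown above:

from azext_datafactory.vendored_sdks.datafactory.models import (
    IntegrationRuntimeDataFlowProperties,
)

data_flow_properties = IntegrationRuntimeDataFlowProperties(
    compute_type="General",  # or a DataFlowComputeType enum value
    core_count=8,
    time_to_live=15,         # minutes
    cleanup=False,           # reuse the cluster across runs until the TTL is reached
)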
class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model):
@@ -21839,6 +22545,103 @@ def __init__(
self.received_bytes = None
+class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model):
+ """Azure-SSIS integration runtime outbound network dependency endpoints for one category.
+
+ :param category: The category of outbound network dependency.
+ :type category: str
+ :param endpoints: The endpoints for outbound network dependency.
+ :type endpoints:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint]
+ """
+
+ _attribute_map = {
+ 'category': {'key': 'category', 'type': 'str'},
+ 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ category: Optional[str] = None,
+ endpoints: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpoint"]] = None,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs)
+ self.category = category
+ self.endpoints = endpoints
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model):
+ """The endpoint for Azure-SSIS integration runtime outbound network dependency.
+
+ :param domain_name: The domain name of the endpoint.
+ :type domain_name: str
+ :param endpoint_details: The details of the endpoint.
+ :type endpoint_details:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]
+ """
+
+ _attribute_map = {
+ 'domain_name': {'key': 'domainName', 'type': 'str'},
+ 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ domain_name: Optional[str] = None,
+ endpoint_details: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails"]] = None,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs)
+ self.domain_name = domain_name
+ self.endpoint_details = endpoint_details
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model):
+ """The details of Azure-SSIS integration runtime outbound network dependency endpoint.
+
+ :param port: The port of the endpoint.
+ :type port: int
+ """
+
+ _attribute_map = {
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ port: Optional[int] = None,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs)
+ self.port = port
+
+
+class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model):
+ """Azure-SSIS integration runtime outbound network dependency endpoints.
+
+ :param value: The list of outbound network dependency endpoints.
+ :type value:
+ list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint"]] = None,
+ **kwargs
+ ):
+ super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs)
+ self.value = value
+
+
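All four new models are optional-everything containers around a category → endpoint → port hierarchy, so consumers should guard against `None` at each level. A hedged sketch that flattens a response into displayable rows:

def flatten_outbound_dependencies(response):
    """Yield (category, domain, port) rows from an
    IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse."""
    rows = []
    for category in response.value or []:
        for endpoint in category.endpoints or []:
            for detail in endpoint.endpoint_details or []:
                rows.append((category.category, endpoint.domain_name, detail.port))
    return rows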
class IntegrationRuntimeReference(msrest.serialization.Model):
"""Integration runtime reference type.
@@ -22031,6 +22834,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
list[~data_factory_management_client.models.CustomSetupBase]
:param package_stores: Package stores for the SSIS Integration Runtime.
:type package_stores: list[~data_factory_management_client.models.PackageStore]
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_attribute_map = {
@@ -22042,6 +22847,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
'edition': {'key': 'edition', 'type': 'str'},
'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -22055,6 +22861,7 @@ def __init__(
edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None,
express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None,
package_stores: Optional[List["PackageStore"]] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
@@ -22066,6 +22873,7 @@ def __init__(
self.edition = edition
self.express_custom_setup_properties = express_custom_setup_properties
self.package_stores = package_stores
+ self.credential = credential
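A hedged sketch of wiring the new `credential` into the SSIS properties; `CredentialReference` is assumed to take a `reference_name`, matching the other `*Reference` types in this models module:

from azext_datafactory.vendored_sdks.datafactory.models import (
    CredentialReference,
    IntegrationRuntimeSsisProperties,
)

ssis_properties = IntegrationRuntimeSsisProperties(
    edition="Standard",
    credential=CredentialReference(reference_name="myUserAssignedIdentityCredential"),
)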
class IntegrationRuntimeStatus(msrest.serialization.Model):
@@ -22201,6 +23009,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
:param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will
use.
:type public_i_ps: list[str]
+ :param subnet_id: The ID of the subnet to which this Azure-SSIS integration runtime will be
+ joined.
+ :type subnet_id: str
"""
_attribute_map = {
@@ -22208,6 +23019,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
'v_net_id': {'key': 'vNetId', 'type': 'str'},
'subnet': {'key': 'subnet', 'type': 'str'},
'public_i_ps': {'key': 'publicIPs', 'type': '[str]'},
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
}
def __init__(
@@ -22217,6 +23029,7 @@ def __init__(
v_net_id: Optional[str] = None,
subnet: Optional[str] = None,
public_i_ps: Optional[List[str]] = None,
+ subnet_id: Optional[str] = None,
**kwargs
):
super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs)
@@ -22224,6 +23037,7 @@ def __init__(
self.v_net_id = v_net_id
self.subnet = subnet
self.public_i_ps = public_i_ps
+ self.subnet_id = subnet_id
class JiraLinkedService(LinkedService):
@@ -22411,12 +23225,15 @@ class JiraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -22432,8 +23249,9 @@ class JiraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -22444,12 +23262,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'JiraSource' # type: str
self.query = query
@@ -22551,9 +23370,8 @@ class JsonFormat(DatasetStorageFormat):
:param deserializer: Deserializer. Type: string (or Expression with resultType string).
:type deserializer: object
:param file_pattern: File pattern of JSON. To be more specific, the way of separating a
- collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern
+ collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
:param nesting_separator: The character used to separate nesting levels. Default value is '.'
(dot). Type: string (or Expression with resultType string).
:type nesting_separator: object
@@ -22583,7 +23401,7 @@ class JsonFormat(DatasetStorageFormat):
'type': {'key': 'type', 'type': 'str'},
'serializer': {'key': 'serializer', 'type': 'object'},
'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'encoding_name': {'key': 'encodingName', 'type': 'object'},
'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
@@ -22596,7 +23414,7 @@ def __init__(
additional_properties: Optional[Dict[str, object]] = None,
serializer: Optional[object] = None,
deserializer: Optional[object] = None,
- file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None,
+ file_pattern: Optional[object] = None,
nesting_separator: Optional[object] = None,
encoding_name: Optional[object] = None,
json_node_reference: Optional[object] = None,
@@ -22673,6 +23491,9 @@ class JsonSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Json format settings.
@@ -22691,6 +23512,7 @@ class JsonSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
}
@@ -22704,11 +23526,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["JsonWriteSettings"] = None,
**kwargs
):
- super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'JsonSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -22733,13 +23556,16 @@ class JsonSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Json format settings.
:type format_settings: ~data_factory_management_client.models.JsonReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -22752,9 +23578,10 @@ class JsonSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -22764,12 +23591,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["JsonReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'JsonSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
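A hedged sketch of a JSON copy source combining the relaxed `additional_columns` with store settings; `AzureBlobStorageReadSettings` is assumed available as one of the `StoreReadSettings` subclasses in this module:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureBlobStorageReadSettings,
    JsonSource,
)

source = JsonSource(
    store_settings=AzureBlobStorageReadSettings(recursive=True),
    additional_columns=[{"name": "source_file", "value": "@item().name"}],
)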
@@ -22787,9 +23615,8 @@ class JsonWriteSettings(FormatWriteSettings):
:param type: Required. The write setting type. Constant filled by server.
:type type: str
:param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON
- objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern
+ objects will be treated. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
"""
_validation = {
@@ -22799,14 +23626,14 @@ class JsonWriteSettings(FormatWriteSettings):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None,
+ file_pattern: Optional[object] = None,
**kwargs
):
super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs)
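Because `file_pattern` is now typed as `object` rather than the `JsonWriteFilePattern` enum, both literal strings and ADF expressions are accepted. A minimal sketch:

from azext_datafactory.vendored_sdks.datafactory.models import JsonWriteSettings

static_settings = JsonWriteSettings(file_pattern="arrayOfObjects")
dynamic_settings = JsonWriteSettings(
    file_pattern="@pipeline().parameters.jsonFilePattern"  # resolved at run time
)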
@@ -23504,12 +24331,15 @@ class MagentoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -23525,8 +24355,9 @@ class MagentoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -23537,16 +24368,61 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MagentoSource' # type: str
self.query = query
+class ManagedIdentityCredential(Credential):
+ """Managed identity credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param resource_id: The resource ID of the user-assigned managed identity.
+ :type resource_id: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+ self.type = 'ManagedIdentity' # type: str
+ self.resource_id = resource_id
+
+
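A minimal sketch of constructing the new credential type; the resource ID below is a made-up placeholder:

from azext_datafactory.vendored_sdks.datafactory.models import ManagedIdentityCredential

credential = ManagedIdentityCredential(
    description="UAMI used to authenticate to the customer's key vault",
    resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg"
        "/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myIdentity"
    ),
)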
class ManagedIntegrationRuntime(IntegrationRuntime):
"""Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
@@ -24245,12 +25121,15 @@ class MariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -24266,8 +25145,9 @@ class MariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -24278,12 +25158,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MariaDBSource' # type: str
self.query = query
@@ -24533,12 +25414,15 @@ class MarketoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -24554,8 +25438,9 @@ class MarketoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -24566,16 +25451,43 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MarketoSource' # type: str
self.query = query


+class MetadataItem(msrest.serialization.Model):
+ """Specify the name and value of custom metadata item.
+
+ :param name: Metadata item key name. Type: string (or Expression with resultType string).
+ :type name: object
+ :param value: Metadata item value. Type: string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'object'},
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[object] = None,
+ value: Optional[object] = None,
+ **kwargs
+ ):
+ super(MetadataItem, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
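For illustration, a sketch constructing the new MetadataItem model. Both fields are typed as 'object', so literals and ADF expression payloads are equally valid (same assumed import path as above):

    from azext_datafactory.vendored_sdks.datafactory.models import MetadataItem  # hypothetical path

    # A static item, and one whose value is resolved at run time.
    static_item = MetadataItem(name="owner", value="data-platform")
    dynamic_item = MetadataItem(
        name="runId",
        value={"type": "Expression", "value": "@pipeline().RunId"},
    )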
class MicrosoftAccessLinkedService(LinkedService):
"""Microsoft Access linked service.
@@ -24687,6 +25599,9 @@ class MicrosoftAccessSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -24704,6 +25619,7 @@ class MicrosoftAccessSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -24716,10 +25632,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MicrosoftAccessSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -24743,11 +25660,14 @@ class MicrosoftAccessSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -24760,8 +25680,9 @@ class MicrosoftAccessSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -24771,11 +25692,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MicrosoftAccessSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -24982,6 +25904,74 @@ def __init__(
self.database = database


+class MongoDbAtlasSink(CopySink):
+ """A copy activity MongoDB Atlas sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ write_batch_size: Optional[object] = None,
+ write_batch_timeout: Optional[object] = None,
+ sink_retry_count: Optional[object] = None,
+ sink_retry_wait: Optional[object] = None,
+ max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ **kwargs
+ ):
+ super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+ self.type = 'MongoDbAtlasSink' # type: str
+ self.write_behavior = write_behavior
+
+
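A sketch of the new sink's write_behavior switch, under the same import-path assumption:

    from azext_datafactory.vendored_sdks.datafactory.models import MongoDbAtlasSink  # hypothetical path

    # "upsert" overwrites documents that share a key; the default,
    # "insert", raises when the key already exists.
    sink = MongoDbAtlasSink(
        write_behavior="upsert",
        write_batch_size=1000,
    )
    assert sink.type == 'MongoDbAtlasSink'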
class MongoDbAtlasSource(CopySource):
"""A copy activity source for a MongoDB Atlas database.
@@ -25001,6 +25991,9 @@ class MongoDbAtlasSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -25016,8 +26009,8 @@ class MongoDbAtlasSource(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -25030,11 +26023,12 @@ class MongoDbAtlasSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -25044,14 +26038,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbAtlasSource' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
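On the source side, a sketch combining the selection filter with cursor methods; MongoDbCursorMethodsProperties and its keyword arguments are assumed from the public Data Factory SDK rather than shown in this diff:

    from azext_datafactory.vendored_sdks.datafactory.models import (  # hypothetical path
        MongoDbAtlasSource,
        MongoDbCursorMethodsProperties,
    )

    # Return only active documents, at most 100, fetched 50 per batch.
    source = MongoDbAtlasSource(
        filter='{"status": "active"}',
        cursor_methods=MongoDbCursorMethodsProperties(limit=100),
        batch_size=50,
    )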
@@ -25308,12 +26303,15 @@ class MongoDbSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -25326,8 +26324,9 @@ class MongoDbSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -25337,11 +26336,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -25478,6 +26478,74 @@ def __init__(
self.database = database


+class MongoDbV2Sink(CopySink):
+ """A copy activity MongoDB sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ write_batch_size: Optional[object] = None,
+ write_batch_timeout: Optional[object] = None,
+ sink_retry_count: Optional[object] = None,
+ sink_retry_wait: Optional[object] = None,
+ max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = write_behavior
+
+
class MongoDbV2Source(CopySource):
"""A copy activity source for a MongoDB database.
@@ -25497,6 +26565,9 @@ class MongoDbV2Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -25512,8 +26583,8 @@ class MongoDbV2Source(CopySource):
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -25526,11 +26597,12 @@ class MongoDbV2Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -25540,14 +26612,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbV2Source' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
@@ -25640,12 +26713,15 @@ class MySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -25660,8 +26736,9 @@ class MySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -25672,12 +26749,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MySqlSource' # type: str
self.query = query
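The additional_columns loosening from List["AdditionalColumns"] to 'object' is what allows both shapes below; a sketch under the same import-path assumption:

    from azext_datafactory.vendored_sdks.datafactory.models import MySqlSource  # hypothetical path

    # A concrete list of name/value pairs, as before...
    literal = MySqlSource(
        query="SELECT id, name FROM users",
        additional_columns=[{"name": "load_date", "value": "@utcnow()"}],
    )

    # ...or a whole-property expression, which the old typed-list
    # annotation could not represent.
    dynamic = MySqlSource(
        query="SELECT id, name FROM users",
        additional_columns={"type": "Expression", "value": "@pipeline().parameters.extraCols"},
    )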
@@ -25870,12 +26948,15 @@ class NetezzaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -25896,8 +26977,9 @@ class NetezzaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
@@ -25910,14 +26992,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
partition_option: Optional[object] = None,
partition_settings: Optional["NetezzaPartitionSettings"] = None,
**kwargs
):
- super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'NetezzaSource' # type: str
self.query = query
self.partition_option = partition_option
@@ -26231,6 +27314,9 @@ class ODataSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: OData query. For example, "$top=1". Type: string (or Expression with resultType
string).
:type query: object
@@ -26240,8 +27326,8 @@ class ODataSource(CopySource):
((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type http_request_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -26254,9 +27340,10 @@ class ODataSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -26266,12 +27353,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ODataSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -26388,6 +27476,9 @@ class OdbcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -26405,6 +27496,7 @@ class OdbcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -26417,10 +27509,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OdbcSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -26444,12 +27537,15 @@ class OdbcSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -26464,8 +27560,9 @@ class OdbcSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -26476,12 +27573,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'OdbcSource' # type: str
self.query = query
@@ -26730,6 +27828,9 @@ class Office365Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param allowed_groups: The groups containing all the users. Type: array of strings (or
Expression with resultType array of strings).
:type allowed_groups: object
@@ -26761,6 +27862,7 @@ class Office365Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
@@ -26776,6 +27878,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
allowed_groups: Optional[object] = None,
user_scope_filter_uri: Optional[object] = None,
date_filter_column: Optional[object] = None,
@@ -26784,7 +27887,7 @@ def __init__(
output_columns: Optional[object] = None,
**kwargs
):
- super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'Office365Source' # type: str
self.allowed_groups = allowed_groups
self.user_scope_filter_uri = user_scope_filter_uri
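A sketch of an Office365Source scoped to specific groups. The shape of output_columns is an assumption, since the property is typed only as 'object':

    from azext_datafactory.vendored_sdks.datafactory.models import Office365Source  # hypothetical path

    # Restrict the export to two groups and project a subset of columns.
    source = Office365Source(
        allowed_groups=["sales@contoso.com", "ops@contoso.com"],
        date_filter_column="ReceivedDateTime",
        output_columns=[{"name": "Id"}, {"name": "Subject"}],
    )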
@@ -27215,6 +28318,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -27255,6 +28361,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -27272,6 +28379,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -27284,7 +28392,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleCloudStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
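A sketch of a recursive wildcard read with metrics collection disabled, under the same import-path assumption:

    from azext_datafactory.vendored_sdks.datafactory.models import OracleCloudStorageReadSettings  # hypothetical path

    # Pick up every CSV under the 2021 export tree.
    read_settings = OracleCloudStorageReadSettings(
        recursive=True,
        wildcard_folder_path="exports/2021/*",
        wildcard_file_name="*.csv",
        disable_metrics_collection=True,
    )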
@@ -27585,12 +28693,15 @@ class OracleServiceCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -27606,8 +28717,9 @@ class OracleServiceCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -27618,12 +28730,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'OracleServiceCloudSource' # type: str
self.query = query
@@ -27653,6 +28766,9 @@ class OracleSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -27670,6 +28786,7 @@ class OracleSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -27682,10 +28799,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -27709,6 +28827,9 @@ class OracleSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType
string).
:type oracle_reader_query: object
@@ -27721,8 +28842,8 @@ class OracleSource(CopySource):
:param partition_settings: The settings that will be leveraged for Oracle source partitioning.
:type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -27735,11 +28856,12 @@ class OracleSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -27749,14 +28871,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
oracle_reader_query: Optional[object] = None,
query_timeout: Optional[object] = None,
partition_option: Optional[object] = None,
partition_settings: Optional["OraclePartitionSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleSource' # type: str
self.oracle_reader_query = oracle_reader_query
self.query_timeout = query_timeout
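A partitioned-read sketch; OraclePartitionSettings and its field names are assumed from the public Data Factory SDK rather than shown in this diff:

    from azext_datafactory.vendored_sdks.datafactory.models import (  # hypothetical path
        OraclePartitionSettings,
        OracleSource,
    )

    # Split the copy across a numeric ID range.
    source = OracleSource(
        oracle_reader_query="SELECT * FROM sales",
        partition_option="DynamicRange",
        partition_settings=OraclePartitionSettings(
            partition_column_name="sale_id",
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
    )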
@@ -27875,8 +28998,9 @@ class OrcDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the ORC data storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo".
- :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec
+ :param orc_compression_codec: The ORC compression codec. Possible values include "none",
+ "zlib", "snappy" and "lzo". Type: string (or Expression with resultType string).
+ :type orc_compression_codec: object
"""
_validation = {
@@ -27895,7 +29019,7 @@ class OrcDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'},
+ 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'},
}
def __init__(
@@ -27910,7 +29034,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None,
+ orc_compression_codec: Optional[object] = None,
**kwargs
):
super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
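Because orc_compression_codec is now an 'object', it can carry a runtime expression as well as a literal such as "snappy". The LinkedServiceReference construction below is assumed from the public SDK:

    from azext_datafactory.vendored_sdks.datafactory.models import (  # hypothetical path
        LinkedServiceReference,
        OrcDataset,
    )

    # Resolve the codec from a dataset parameter at run time.
    dataset = OrcDataset(
        linked_service_name=LinkedServiceReference(reference_name="ls_blob"),
        orc_compression_codec={"type": "Expression", "value": "@dataset().codec"},
    )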
@@ -27983,6 +29107,9 @@ class OrcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: ORC format settings.
@@ -28001,6 +29128,7 @@ class OrcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'},
}
@@ -28014,11 +29142,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["OrcWriteSettings"] = None,
**kwargs
):
- super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OrcSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -28043,11 +29172,14 @@ class OrcSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -28060,8 +29192,9 @@ class OrcSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -28071,11 +29204,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OrcSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
@@ -28220,9 +29354,9 @@ class ParquetDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the parquet storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
+ :param compression_codec: The data compression codec. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
"""
_validation = {
@@ -28241,7 +29375,7 @@ class ParquetDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
}
def __init__(
@@ -28256,7 +29390,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- compression_codec: Optional[Union[str, "CompressionCodec"]] = None,
+ compression_codec: Optional[object] = None,
**kwargs
):
super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
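
With compressionCodec widened from the CompressionCodec enum to object, a Parquet dataset can now take either a plain codec name or a runtime expression. A sketch under the same import assumption; the linked-service and location names are placeholders:

from data_factory_management_client.models import (
    AzureBlobStorageLocation,
    LinkedServiceReference,
    ParquetDataset,
)

dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureBlobLS"),
    location=AzureBlobStorageLocation(container="data", folder_path="parquet"),
    # Previously Optional[Union[str, "CompressionCodec"]]; now any string or
    # expression object is accepted.
    compression_codec="snappy",
)
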
@@ -28329,6 +29463,9 @@ class ParquetSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Parquet format settings.
@@ -28347,6 +29484,7 @@ class ParquetSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'},
}
@@ -28360,11 +29498,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["ParquetWriteSettings"] = None,
**kwargs
):
- super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ParquetSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -28389,11 +29528,14 @@ class ParquetSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -28406,8 +29548,9 @@ class ParquetSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -28417,11 +29560,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ParquetSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
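
additionalColumns is loosened the same way, from a typed list of AdditionalColumns models to object, so callers now pass raw name/value dictionaries (or a runtime expression) rather than model instances. A sketch:

from data_factory_management_client.models import ParquetSource

# Old shape: additional_columns=[AdditionalColumns(name=..., value=...)].
# New shape: any object, typically a list of name/value dicts.
source = ParquetSource(
    additional_columns=[
        {"name": "ingestedAt", "value": "@utcnow()"},
        {"name": "sourceFile", "value": "$$FILEPATH"},  # reserved ADF value
    ]
)
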
@@ -28648,12 +29792,15 @@ class PaypalSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -28669,8 +29816,9 @@ class PaypalSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -28681,12 +29829,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PaypalSource' # type: str
self.query = query
@@ -28915,12 +30064,15 @@ class PhoenixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -28936,8 +30088,9 @@ class PhoenixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -28948,12 +30101,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PhoenixSource' # type: str
self.query = query
@@ -29288,18 +30442,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model):
:vartype id: str
:ivar invoked_by_type: The type of the entity that started the run.
:vartype invoked_by_type: str
+ :ivar pipeline_name: The name of the pipeline that triggered the run, if any.
+ :vartype pipeline_name: str
+ :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any.
+ :vartype pipeline_run_id: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'invoked_by_type': {'readonly': True},
+ 'pipeline_name': {'readonly': True},
+ 'pipeline_run_id': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
+ 'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
+ 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
}
def __init__(
@@ -29310,6 +30472,8 @@ def __init__(
self.name = None
self.id = None
self.invoked_by_type = None
+ self.pipeline_name = None
+ self.pipeline_run_id = None
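
The two new read-only fields identify the parent pipeline when a run was started by an ExecutePipeline activity; both stay None otherwise. A usage sketch, assuming an authenticated management client named client:

run = client.pipeline_runs.get(
    resource_group_name="my-rg",
    factory_name="my-factory",
    run_id="00000000-0000-0000-0000-000000000000",
)
invoked_by = run.invoked_by
if invoked_by is not None and invoked_by.pipeline_name:
    # Populated only for runs triggered by another pipeline run.
    print(f"parent: {invoked_by.pipeline_name} (run {invoked_by.pipeline_run_id})")
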
class PipelineRunsQueryResponse(msrest.serialization.Model):
@@ -29476,12 +30640,15 @@ class PostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -29496,8 +30663,9 @@ class PostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -29508,12 +30676,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PostgreSqlSource' # type: str
self.query = query
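
The tabular sources above all share the queryTimeout contract: a timespan string matching ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])), i.e. an optional day count followed by hh:mm:ss. For example:

from data_factory_management_client.models import PostgreSqlSource

source = PostgreSqlSource(
    query="SELECT * FROM public.orders",
    query_timeout="02:00:00",       # two hours, hh:mm:ss
    # query_timeout="1.12:00:00",   # one day twelve hours, d.hh:mm:ss
)
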
@@ -29832,12 +31001,15 @@ class PrestoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -29853,8 +31025,9 @@ class PrestoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -29865,12 +31038,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PrestoSource' # type: str
self.query = query
@@ -30357,12 +31531,15 @@ class QuickBooksSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -30378,8 +31555,9 @@ class QuickBooksSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -30390,12 +31568,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'QuickBooksSource' # type: str
self.query = query
@@ -30578,11 +31757,14 @@ class RelationalSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -30595,8 +31777,9 @@ class RelationalSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -30606,11 +31789,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RelationalSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -30975,12 +32159,15 @@ class ResponsysSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -30996,8 +32183,9 @@ class ResponsysSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -31008,12 +32196,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ResponsysSource' # type: str
self.query = query
@@ -31166,6 +32355,8 @@ class RestServiceLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -31193,6 +32384,7 @@ class RestServiceLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -31215,6 +32407,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
aad_resource_id: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -31231,6 +32424,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.aad_resource_id = aad_resource_id
self.encrypted_credential = encrypted_credential
+ self.credential = credential
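
The new credential property lets a REST linked service authenticate through a factory credential (for example, a user-assigned managed identity) instead of an inline secret. A sketch; the URL and credential name are placeholders:

from data_factory_management_client.models import (
    CredentialReference,
    RestServiceLinkedService,
)

linked_service = RestServiceLinkedService(
    url="https://example.com/api",
    authentication_type="ManagedServiceIdentity",
    aad_resource_id="https://example.com",
    credential=CredentialReference(reference_name="my-uami-credential"),
)
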
class RestSink(CopySink):
@@ -31258,6 +32452,9 @@ class RestSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -31288,6 +32485,7 @@ class RestSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
@@ -31304,6 +32502,7 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
additional_headers: Optional[object] = None,
http_request_timeout: Optional[object] = None,
@@ -31311,7 +32510,7 @@ def __init__(
http_compression_type: Optional[object] = None,
**kwargs
):
- super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RestSink' # type: str
self.request_method = request_method
self.additional_headers = additional_headers
@@ -31339,6 +32538,9 @@ class RestSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -31359,8 +32561,8 @@ class RestSource(CopySource):
:param request_interval: The time to wait before sending the next page request.
:type request_interval: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -31373,13 +32575,14 @@ class RestSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'pagination_rules': {'key': 'paginationRules', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
'request_interval': {'key': 'requestInterval', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -31389,16 +32592,17 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
request_body: Optional[object] = None,
additional_headers: Optional[object] = None,
pagination_rules: Optional[object] = None,
http_request_timeout: Optional[object] = None,
request_interval: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RestSource' # type: str
self.request_method = request_method
self.request_body = request_body
@@ -31837,12 +33041,15 @@ class SalesforceMarketingCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -31858,8 +33065,9 @@ class SalesforceMarketingCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -31870,12 +33078,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SalesforceMarketingCloudSource' # type: str
self.query = query
@@ -32133,6 +33342,9 @@ class SalesforceServiceCloudSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -32160,6 +33372,7 @@ class SalesforceServiceCloudSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32174,12 +33387,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
external_id_field_name: Optional[object] = None,
ignore_null_values: Optional[object] = None,
**kwargs
):
- super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceServiceCloudSink' # type: str
self.write_behavior = write_behavior
self.external_id_field_name = external_id_field_name
@@ -32205,14 +33419,17 @@ class SalesforceServiceCloudSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
include: "Query", "QueryAll".
:type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -32225,9 +33442,10 @@ class SalesforceServiceCloudSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -32237,12 +33455,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceServiceCloudSource' # type: str
self.query = query
self.read_behavior = read_behavior
@@ -32274,6 +33493,9 @@ class SalesforceSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -32301,6 +33523,7 @@ class SalesforceSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32315,12 +33538,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
external_id_field_name: Optional[object] = None,
ignore_null_values: Optional[object] = None,
**kwargs
):
- super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceSink' # type: str
self.write_behavior = write_behavior
self.external_id_field_name = external_id_field_name
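
For the Salesforce sinks, writeBehavior defaults to Insert; Upsert keys on externalIdFieldName, and ignoreNullValues decides whether NULLs in the source overwrite existing Salesforce values. A sketch with placeholder field names:

from data_factory_management_client.models import SalesforceSink

sink = SalesforceSink(
    write_behavior="Upsert",                    # default is "Insert"
    external_id_field_name="Customer_Key__c",   # upsert key; defaults to Id
    ignore_null_values=True,                    # NULLs leave existing values intact
    disable_metrics_collection=False,
)
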
@@ -32346,12 +33570,15 @@ class SalesforceSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
@@ -32369,8 +33596,9 @@ class SalesforceSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
}
@@ -32382,13 +33610,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
**kwargs
):
- super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SalesforceSource' # type: str
self.query = query
self.read_behavior = read_behavior
@@ -32562,12 +33791,15 @@ class SapBwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: MDX query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -32582,8 +33814,9 @@ class SapBwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -32594,12 +33827,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapBwSource' # type: str
self.query = query
@@ -32772,6 +34006,9 @@ class SapCloudForCustomerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible
values include: "Insert", "Update".
:type write_behavior: str or
@@ -32795,6 +34032,7 @@ class SapCloudForCustomerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -32808,11 +34046,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SapCloudForCustomerSink' # type: str
self.write_behavior = write_behavior
self.http_request_timeout = http_request_timeout
@@ -32837,12 +34076,15 @@ class SapCloudForCustomerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or
Expression with resultType string).
:type query: object
@@ -32863,8 +34105,9 @@ class SapCloudForCustomerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -32876,13 +34119,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapCloudForCustomerSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
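
`additional_columns` is loosened from a typed list of `AdditionalColumns` models to a bare `object`, so callers now hand over the raw array-of-objects payload (name/value pairs) and it is serialized as-is. A sketch under the same import-path assumption:

    from data_factory_management_client.models import SapCloudForCustomerSource

    source = SapCloudForCustomerSource(
        query="$top=10",
        query_timeout="02:00:00",
        # Previously Optional[List["AdditionalColumns"]]; now any JSON-like
        # value in the documented name/value shape.
        additional_columns=[{"name": "ingestedAt", "value": "@utcnow()"}],
    )
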
@@ -33050,12 +34294,15 @@ class SapEccSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with
resultType string).
:type query: object
@@ -33076,8 +34323,9 @@ class SapEccSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -33089,13 +34337,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapEccSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -33226,12 +34475,15 @@ class SapHanaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: SAP HANA Sql query. Type: string (or Expression with resultType string).
:type query: object
:param packet_size: The packet size of data read from SAP HANA. Type: integer (or Expression
@@ -33255,8 +34507,9 @@ class SapHanaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'packet_size': {'key': 'packetSize', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
@@ -33270,15 +34523,16 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
packet_size: Optional[object] = None,
partition_option: Optional[object] = None,
partition_settings: Optional["SapHanaPartitionSettings"] = None,
**kwargs
):
- super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapHanaSource' # type: str
self.query = query
self.packet_size = packet_size
@@ -33495,12 +34749,15 @@ class SapOpenHubSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param exclude_last_request: Whether to exclude the records of the last request. The default
value is true. Type: boolean (or Expression with resultType boolean).
:type exclude_last_request: object
@@ -33527,8 +34784,9 @@ class SapOpenHubSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
'base_request_id': {'key': 'baseRequestId', 'type': 'object'},
'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
@@ -33542,15 +34800,16 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
exclude_last_request: Optional[object] = None,
base_request_id: Optional[object] = None,
custom_rfc_read_table_function_module: Optional[object] = None,
sap_data_column_delimiter: Optional[object] = None,
**kwargs
):
- super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapOpenHubSource' # type: str
self.exclude_last_request = exclude_last_request
self.base_request_id = base_request_id
@@ -33917,12 +35176,15 @@ class SapTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param row_count: The number of rows to be retrieved. Type: integer (or Expression with
resultType integer).
:type row_count: object
@@ -33964,8 +35226,9 @@ class SapTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'row_count': {'key': 'rowCount', 'type': 'object'},
'row_skips': {'key': 'rowSkips', 'type': 'object'},
'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
@@ -33984,8 +35247,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
row_count: Optional[object] = None,
row_skips: Optional[object] = None,
rfc_table_fields: Optional[object] = None,
@@ -33997,7 +35261,7 @@ def __init__(
partition_settings: Optional["SapTablePartitionSettings"] = None,
**kwargs
):
- super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapTableSource' # type: str
self.row_count = row_count
self.row_skips = row_skips
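
The same two additions land on `SapTableSource`; a sketch combining them with the table-specific knobs visible in the signature above (the SAP field names are illustrative):

    from data_factory_management_client.models import SapTableSource

    source = SapTableSource(
        row_count=10000,
        row_skips=0,
        rfc_table_fields="MATNR, WERKS",    # illustrative SAP field list
        disable_metrics_collection=True,
        additional_columns=[{"name": "loadId", "value": "@pipeline().RunId"}],
    )
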
@@ -34128,9 +35392,8 @@ class ScriptAction(msrest.serialization.Model):
:type name: str
:param uri: Required. The URI for the script action.
:type uri: str
- :param roles: Required. The node types on which the script action should be executed. Possible
- values include: "Headnode", "Workernode", "Zookeeper".
- :type roles: str or ~data_factory_management_client.models.HdiNodeTypes
+ :param roles: Required. The node types on which the script action should be executed.
+ :type roles: str
:param parameters: The parameters for the script action.
:type parameters: str
"""
@@ -34153,7 +35416,7 @@ def __init__(
*,
name: str,
uri: str,
- roles: Union[str, "HdiNodeTypes"],
+ roles: str,
parameters: Optional[str] = None,
**kwargs
):
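
`roles` is relaxed from the `HdiNodeTypes` enum to a plain string, so the previously documented values ("Headnode", "Workernode", "Zookeeper") still work but are no longer constrained client-side. A sketch with illustrative values:

    from data_factory_management_client.models import ScriptAction

    action = ScriptAction(
        name="install-deps",
        uri="https://example.org/scripts/install-deps.sh",
        roles="Workernode",    # plain string now; was Union[str, "HdiNodeTypes"]
        parameters="--quiet",
    )
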
@@ -34732,12 +35995,15 @@ class ServiceNowSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -34753,8 +36019,9 @@ class ServiceNowSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -34765,16 +36032,72 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ServiceNowSource' # type: str
self.query = query
+class ServicePrincipalCredential(Credential):
+ """Service principal credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param service_principal_id: The app ID of the service principal used to authenticate.
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to authenticate.
+ :type service_principal_key:
+ ~data_factory_management_client.models.AzureKeyVaultSecretReference
+ :param tenant: The ID of the tenant to which the service principal belongs.
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ service_principal_id: Optional[object] = None,
+ service_principal_key: Optional["AzureKeyVaultSecretReference"] = None,
+ tenant: Optional[object] = None,
+ **kwargs
+ ):
+ super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+ self.type = 'ServicePrincipal' # type: str
+ self.service_principal_id = service_principal_id
+ self.service_principal_key = service_principal_key
+ self.tenant = tenant
+
+
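
A sketch of the new credential model, pairing it with the `AzureKeyVaultSecretReference` its `service_principal_key` field expects. The `AzureKeyVaultSecretReference` and `LinkedServiceReference` shapes are assumed from the upstream SDK, and every value is a placeholder:

    from data_factory_management_client.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        ServicePrincipalCredential,
    )

    credential = ServicePrincipalCredential(
        description="Service principal used by copy activities",
        service_principal_id="00000000-0000-0000-0000-000000000000",
        service_principal_key=AzureKeyVaultSecretReference(
            # Some generator versions also require type="LinkedServiceReference".
            store=LinkedServiceReference(reference_name="AzureKeyVault1"),
            secret_name="sp-key",
        ),
        tenant="contoso.onmicrosoft.com",
    )
    assert credential.type == 'ServicePrincipal'
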
class SetVariableActivity(Activity):
"""Set value for a Variable.
@@ -34887,6 +36210,9 @@ class SftpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -34924,6 +36250,7 @@ class SftpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -34940,6 +36267,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -34951,7 +36279,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SftpReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
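
Store read settings pick up the flag as well; a sketch of a recursive wildcard read over SFTP with metrics collection disabled, under the same import-path assumption:

    from data_factory_management_client.models import SftpReadSettings

    read_settings = SftpReadSettings(
        recursive=True,
        wildcard_folder_path="incoming/*",
        wildcard_file_name="*.csv",
        disable_metrics_collection=True,
    )
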
@@ -35095,6 +36423,9 @@ class SftpWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default
@@ -35114,6 +36445,7 @@ class SftpWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'operation_timeout': {'key': 'operationTimeout', 'type': 'object'},
'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'},
@@ -35124,12 +36456,13 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
operation_timeout: Optional[object] = None,
use_temp_file_rename: Optional[object] = None,
**kwargs
):
- super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'SftpWriteSettings' # type: str
self.operation_timeout = operation_timeout
self.use_temp_file_rename = use_temp_file_rename
@@ -35308,6 +36641,9 @@ class SharePointOnlineListSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: The OData query to filter the data in SharePoint Online list. For example,
"$top=1". Type: string (or Expression with resultType string).
:type query: object
@@ -35327,6 +36663,7 @@ class SharePointOnlineListSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -35338,11 +36675,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SharePointOnlineListSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -35520,12 +36858,15 @@ class ShopifySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -35541,8 +36882,9 @@ class ShopifySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -35553,12 +36895,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ShopifySource' # type: str
self.query = query
@@ -35851,6 +37194,9 @@ class SnowflakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -35870,6 +37216,7 @@ class SnowflakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'},
}
@@ -35883,11 +37230,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
import_settings: Optional["SnowflakeImportCopyCommand"] = None,
**kwargs
):
- super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SnowflakeSink' # type: str
self.pre_copy_script = pre_copy_script
self.import_settings = import_settings
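
The identical pattern on the Snowflake sink; a sketch with an illustrative pre-copy script:

    from data_factory_management_client.models import SnowflakeSink

    sink = SnowflakeSink(
        pre_copy_script="TRUNCATE TABLE staging.orders;",
        disable_metrics_collection=True,
    )
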
@@ -35912,6 +37260,9 @@ class SnowflakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Snowflake Sql query. Type: string (or Expression with resultType string).
:type query: object
:param export_settings: Snowflake export settings.
@@ -35928,6 +37279,7 @@ class SnowflakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'},
}
@@ -35939,11 +37291,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
export_settings: Optional["SnowflakeExportCopyCommand"] = None,
**kwargs
):
- super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SnowflakeSource' # type: str
self.query = query
self.export_settings = export_settings
@@ -36184,12 +37537,15 @@ class SparkSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -36205,8 +37561,9 @@ class SparkSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -36217,12 +37574,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SparkSource' # type: str
self.query = query
@@ -36294,6 +37652,9 @@ class SqlDwSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -36311,6 +37672,14 @@ class SqlDwSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL DW. Type:
+ SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL DW upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings
"""
_validation = {
@@ -36325,12 +37694,16 @@ class SqlDwSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'},
'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'},
}
def __init__(
@@ -36342,15 +37715,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
allow_poly_base: Optional[object] = None,
poly_base_settings: Optional["PolybaseSettings"] = None,
allow_copy_command: Optional[object] = None,
copy_command_settings: Optional["DwCopyCommandSettings"] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlDwUpsertSettings"] = None,
**kwargs
):
- super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlDWSink' # type: str
self.pre_copy_script = pre_copy_script
self.allow_poly_base = allow_poly_base
@@ -36358,6 +37735,9 @@ def __init__(
self.allow_copy_command = allow_copy_command
self.copy_command_settings = copy_command_settings
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlDwSource(TabularSource):
@@ -36379,12 +37759,15 @@ class SqlDwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with
resultType string).
:type sql_reader_query: object
@@ -36413,8 +37796,9 @@ class SqlDwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'},
@@ -36429,8 +37813,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
sql_reader_query: Optional[object] = None,
sql_reader_stored_procedure_name: Optional[object] = None,
stored_procedure_parameters: Optional[object] = None,
@@ -36438,7 +37823,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlDWSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -36447,6 +37832,34 @@ def __init__(
self.partition_settings = partition_settings
+class SqlDwUpsertSettings(msrest.serialization.Model):
+ """Sql DW upsert option settings.
+
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interim_schema_name: Optional[object] = None,
+ keys: Optional[object] = None,
+ **kwargs
+ ):
+ super(SqlDwUpsertSettings, self).__init__(**kwargs)
+ self.interim_schema_name = interim_schema_name
+ self.keys = keys
+
+
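
A sketch wiring the new SQL DW upsert surface together. `write_behavior` is an untyped `object`, so the `SqlDWWriteBehaviorEnum` value travels as a plain string; `keys` names the unique-row columns, per the docstring:

    from data_factory_management_client.models import SqlDwSink, SqlDwUpsertSettings

    sink = SqlDwSink(
        table_option="autoCreate",
        sql_writer_use_table_lock=True,
        write_behavior="Upsert",
        upsert_settings=SqlDwUpsertSettings(
            interim_schema_name="staging",
            keys=["CustomerId"],    # array of strings, per the docstring
        ),
    )
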
class SqlMiSink(CopySink):
"""A copy activity Azure SQL Managed Instance sink.
@@ -36472,6 +37885,9 @@ class SqlMiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -36490,6 +37906,14 @@ class SqlMiSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL MI. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -36504,12 +37928,16 @@ class SqlMiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -36521,15 +37949,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlMISink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -36537,6 +37969,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlMiSource(TabularSource):
@@ -36558,12 +37993,15 @@ class SqlMiSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for an Azure SQL Managed
@@ -36593,8 +38031,9 @@ class SqlMiSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -36610,8 +38049,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
sql_reader_query: Optional[object] = None,
sql_reader_stored_procedure_name: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
@@ -36620,7 +38060,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlMISource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -36772,6 +38212,9 @@ class SqlServerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -36790,6 +38233,14 @@ class SqlServerSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL Server. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -36804,12 +38255,16 @@ class SqlServerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -36821,15 +38276,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlServerSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -36837,6 +38296,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
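
`SqlServerSink` (like `SqlMiSink` above) references `SqlUpsertSettings`, which this hunk does not show; the sketch assumes it mirrors `SqlDwUpsertSettings` with an extra `use_temp_db` knob, as in the upstream SDK:

    from data_factory_management_client.models import SqlServerSink, SqlUpsertSettings

    sink = SqlServerSink(
        sql_writer_use_table_lock=True,
        write_behavior="Upsert",
        upsert_settings=SqlUpsertSettings(
            use_temp_db=True,    # assumed field; not shown in this hunk
            keys=["OrderId"],
        ),
    )
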
class SqlServerSource(TabularSource):
@@ -36858,12 +38320,15 @@ class SqlServerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -36893,8 +38358,9 @@ class SqlServerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -36910,8 +38376,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
sql_reader_query: Optional[object] = None,
sql_reader_stored_procedure_name: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
@@ -36920,7 +38387,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlServerSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
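
Across this regeneration, every copy source gains a `disable_metrics_collection` knob, and `additional_columns` is loosened from a typed `List["AdditionalColumns"]` to a plain `object`, so callers can pass a raw list of name/value pairs or an ADF expression. A minimal sketch of the resulting call shape, assuming the vendored package is importable under the path this diff uses; values are illustrative:

```python
# A minimal sketch, not a definitive usage; import path and values assumed.
from azext_datafactory.vendored_sdks.datafactory import models

source = models.SqlServerSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    # New in this regeneration: opt out of data store metrics collection.
    disable_metrics_collection=True,
    # additional_columns is now a plain object, so a raw list of
    # name/value dicts (or an ADF expression) is passed directly.
    additional_columns=[{"name": "loadDate", "value": "@utcnow()"}],
)
```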
@@ -37106,6 +38573,9 @@ class SqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -37124,6 +38594,14 @@ class SqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL. Type: SqlWriteBehaviorEnum
+ (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -37138,12 +38616,16 @@ class SqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -37155,15 +38637,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -37171,6 +38657,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlSource(TabularSource):
@@ -37192,12 +38681,15 @@ class SqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string).
:type sql_reader_query: object
:param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database
@@ -37229,8 +38721,9 @@ class SqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
@@ -37246,8 +38739,9 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
sql_reader_query: Optional[object] = None,
sql_reader_stored_procedure_name: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
@@ -37256,7 +38750,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -37266,6 +38760,40 @@ def __init__(
self.partition_settings = partition_settings
+class SqlUpsertSettings(msrest.serialization.Model):
+ """SQL upsert option settings.
+
+ :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean
+ (or Expression with resultType boolean).
+ :type use_temp_db: object
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ use_temp_db: Optional[object] = None,
+ interim_schema_name: Optional[object] = None,
+ keys: Optional[object] = None,
+ **kwargs
+ ):
+ super(SqlUpsertSettings, self).__init__(**kwargs)
+ self.use_temp_db = use_temp_db
+ self.interim_schema_name = interim_schema_name
+ self.keys = keys
+
+
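
Putting the new pieces together: `SqlUpsertSettings` above plugs into the new `write_behavior`/`upsert_settings` parameters on the SQL sinks. A hedged sketch, assuming the same vendored import path and that the service accepts the enum-style string "Upsert":

```python
# A minimal sketch; the write_behavior literal "Upsert" is assumed.
from azext_datafactory.vendored_sdks.datafactory import models

upsert = models.SqlUpsertSettings(
    use_temp_db=True,            # stage interim rows in tempdb
    interim_schema_name="stg",   # hypothetical schema for the interim table
    keys=["OrderId"],            # key columns identifying a unique row
)
sink = models.SqlSink(
    write_behavior="Upsert",
    upsert_settings=upsert,
    sql_writer_use_table_lock=False,  # new bulk-copy table-lock toggle
)
```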
class SquareLinkedService(LinkedService):
"""Square Service linked service.
@@ -37453,12 +38981,15 @@ class SquareSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -37474,8 +39005,9 @@ class SquareSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -37486,12 +39018,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SquareSource' # type: str
self.query = query
@@ -38555,12 +40088,15 @@ class SybaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
"""
@@ -38575,8 +40111,9 @@ class SybaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -38587,12 +40124,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SybaseSource' # type: str
self.query = query
@@ -38955,12 +40493,15 @@ class TeradataSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: Teradata query. Type: string (or Expression with resultType string).
:type query: object
:param partition_option: The partition mechanism that will be used for teradata read in
@@ -38981,8 +40522,9 @@ class TeradataSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
@@ -38995,14 +40537,15 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
partition_option: Optional[object] = None,
partition_settings: Optional["TeradataPartitionSettings"] = None,
**kwargs
):
- super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'TeradataSource' # type: str
self.query = query
self.partition_option = partition_option
@@ -40159,12 +41702,15 @@ class VerticaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -40180,8 +41726,9 @@ class VerticaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -40192,12 +41739,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'VerticaSource' # type: str
self.query = query
@@ -40443,10 +41991,7 @@ def __init__(
class WebActivityAuthentication(msrest.serialization.Model):
"""Web activity authentication properties.
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI/ServicePrincipal).
+ :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal).
:type type: str
:param pfx: Base64-encoded contents of a PFX file or Certificate when used for
ServicePrincipal.
@@ -40463,12 +42008,10 @@ class WebActivityAuthentication(msrest.serialization.Model):
:param user_tenant: TenantId for which Azure Auth token will be requested when using
ServicePrincipal Authentication. Type: string (or Expression with resultType string).
:type user_tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
- _validation = {
- 'type': {'required': True},
- }
-
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'SecretBase'},
@@ -40476,17 +42019,19 @@ class WebActivityAuthentication(msrest.serialization.Model):
'password': {'key': 'password', 'type': 'SecretBase'},
'resource': {'key': 'resource', 'type': 'object'},
'user_tenant': {'key': 'userTenant', 'type': 'object'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
self,
*,
- type: str,
+ type: Optional[str] = None,
pfx: Optional["SecretBase"] = None,
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
resource: Optional[object] = None,
user_tenant: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(WebActivityAuthentication, self).__init__(**kwargs)
@@ -40496,6 +42041,7 @@ def __init__(
self.password = password
self.resource = resource
self.user_tenant = user_tenant
+ self.credential = credential
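
With `type` no longer required and the new `credential` parameter, web activity authentication can be expressed through a credential reference instead of inline secrets. A sketch under the assumption that `CredentialReference` takes a `reference_name` in this vendored copy; the identity name is hypothetical:

```python
# A minimal sketch; CredentialReference's constructor shape is assumed.
from azext_datafactory.vendored_sdks.datafactory import models

auth = models.WebActivityAuthentication(
    type="MSI",
    resource="https://management.azure.com/",
    credential=models.CredentialReference(reference_name="myUami"),
)
```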
class WebLinkedServiceTypeProperties(msrest.serialization.Model):
@@ -40833,9 +42379,12 @@ class WebSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -40848,7 +42397,8 @@ class WebSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -40858,10 +42408,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ disable_metrics_collection: Optional[object] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'WebSource' # type: str
self.additional_columns = additional_columns
@@ -41125,12 +42676,15 @@ class XeroSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -41146,8 +42700,9 @@ class XeroSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -41158,12 +42713,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'XeroSource' # type: str
self.query = query
@@ -41336,13 +42892,16 @@ class XmlSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Xml store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Xml format settings.
:type format_settings: ~data_factory_management_client.models.XmlReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
"""
_validation = {
@@ -41355,9 +42914,10 @@ class XmlSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
}
def __init__(
@@ -41367,12 +42927,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["XmlReadSettings"] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
**kwargs
):
- super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'XmlSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -41592,12 +43153,15 @@ class ZohoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object
:param query: A query to retrieve data from source. Type: string (or Expression with resultType
string).
:type query: object
@@ -41613,8 +43177,9 @@ class ZohoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
}
@@ -41625,11 +43190,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
- additional_columns: Optional[List["AdditionalColumns"]] = None,
+ additional_columns: Optional[object] = None,
query: Optional[object] = None,
**kwargs
):
- super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ZohoSource' # type: str
self.query = query
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py
deleted file mode 100644
index 192e09232ad..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ActivityRunOperations(object):
- """ActivityRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def query_by_pipeline_run(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ActivityRunsQueryResponse"
- """Query activity runs based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy option.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ActivityRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_pipeline_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore
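
For orientation, the deleted operation group above exposed a single query call. A sketch of the call shape being removed (illustrative only: the attribute name on the client is assumed, and the IDs are placeholders):

```python
# Illustrative only: this surface is deleted by this diff.
import datetime

runs = client.activity_runs.query_by_pipeline_run(  # attribute name assumed
    resource_group_name="rg",
    factory_name="exampleFactory",
    run_id="00000000-0000-0000-0000-000000000000",  # placeholder run id
    last_updated_after=datetime.datetime(2021, 6, 1),
    last_updated_before=datetime.datetime(2021, 6, 2),
)
print(runs.value)  # ActivityRunsQueryResponse carries the page in .value
```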
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py
deleted file mode 100644
index e0bd3be1783..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowOperations(object):
- """DataFlowOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- properties, # type: "models.DataFlow"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DataFlowResource"
- """Creates or updates a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :param if_match: ETag of the data flow entity. Should only be specified for update, for which
- it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- data_flow = models.DataFlowResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(data_flow, 'DataFlowResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DataFlowResource"
- """Gets a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.DataFlowListResponse"]
- """Lists data flows.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either DataFlowListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('DataFlowListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore
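
For context on the deleted pager: `list_by_factory` wires `prepare_request`, `extract_data`, and `get_next` into an `ItemPaged`, which only issues requests as the caller iterates. A minimal consumption sketch, assuming an already-constructed client instance `client` whose data-flow operations group is exposed as `data_flows` (both names are assumptions; the diff does not show the client class):

```python
# Minimal sketch, not from this repo: `client` is an already-constructed
# management client and `data_flows` is the assumed operations attribute.
pager = client.data_flows.list_by_factory(
    resource_group_name="my-rg",   # hypothetical resource group
    factory_name="myfactory",      # hypothetical factory
)
for data_flow in pager:
    # ItemPaged calls get_next/extract_data lazily, following
    # next_link until the service stops returning one.
    print(data_flow.name)
```
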
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py
deleted file mode 100644
index 2f866416c74..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py
+++ /dev/null
@@ -1,319 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class DatasetOperations(object):
- """DatasetOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.DatasetListResponse"]
- """Lists datasets.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either DatasetListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('DatasetListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- properties, # type: "models.Dataset"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DatasetResource"
- """Creates or updates a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param properties: Dataset properties.
- :type properties: ~data_factory_management_client.models.Dataset
- :param if_match: ETag of the dataset entity. Should only be specified for update, where it
- must match the existing entity or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- dataset = models.DatasetResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(dataset, 'DatasetResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
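
The deleted `create_or_update` only sends `If-Match` when `if_match` is not None, so callers opt into optimistic concurrency. A sketch of an ETag-guarded update, assuming a client instance `client` with a `datasets` operations attribute and that the returned `DatasetResource` exposes its ARM `etag` (assumptions not shown in this diff):

```python
# Sketch of an ETag-guarded upsert (assumed names as noted above).
existing = client.datasets.get("my-rg", "myfactory", "MyDataset")
updated = client.datasets.create_or_update(
    resource_group_name="my-rg",
    factory_name="myfactory",
    dataset_name="MyDataset",
    properties=existing.properties,  # mutate before sending as needed
    if_match=existing.etag,          # None would mean unconditional upsert
)
```
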
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.DatasetResource"]
- """Gets a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * is provided, no content is returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
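
Note that `get` treats 304 as success but only deserializes on 200, so a matching `if_none_match` ETag yields `None`. A sketch, with the client and attribute names assumed as above:

```python
# Sketch of a conditional GET (assumed names as noted above).
cached_etag = '"00000000-0000-0000-0000-000000000000"'  # hypothetical value
resource = client.datasets.get(
    "my-rg", "myfactory", "MyDataset",
    if_none_match=cached_etag,
)
if resource is None:
    print("304 Not Modified: keep the cached copy")
else:
    cached_etag = resource.etag  # refresh the cache (etag attr assumed)
```
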
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py
deleted file mode 100644
index 5b8622e97f9..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py
+++ /dev/null
@@ -1,671 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class FactoryOperations(object):
- """FactoryOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.FactoryListResponse"]
- """Lists factories under the specified subscription.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore
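
`azure.core.paging.ItemPaged` also supports page-level iteration through `by_page()`, which is useful when a caller wants page boundaries rather than a flat item stream. A sketch against the deleted `list`, with the `factories` attribute name assumed:

```python
# Sketch of page-level iteration over factories (attribute name assumed).
for page in client.factories.list().by_page():
    for factory in page:
        print(factory.name, factory.location)
```
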
-
- def configure_factory_repo(
- self,
- location_id, # type: str
- factory_resource_id=None, # type: Optional[str]
- repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Updates a factory's repo information.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param factory_resource_id: The factory resource id.
- :type factory_resource_id: str
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.configure_factory_repo.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore
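
`configure_factory_repo` wraps its two parameters into a `FactoryRepoUpdate` body before POSTing. A sketch using the documented `models` alias on the operations group; the concrete `FactoryVSTSConfiguration` model and its field names mirror the CLI help for `--factory-vsts-configuration` and are assumptions here, as are all resource identifiers:

```python
# Sketch using the documented `models` alias; the concrete configuration
# class and its fields are assumptions based on the CLI help text.
models = client.factories.models
repo = models.FactoryVSTSConfiguration(
    project_name="myproject",
    account_name="myaccount",
    repository_name="myrepo",
    collaboration_branch="main",
    root_folder="/",
)
factory = client.factories.configure_factory_repo(
    location_id="eastus",
    factory_resource_id="/subscriptions/.../factories/myfactory",  # placeholder
    repo_configuration=repo,
)
```
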
-
- def list_by_resource_group(
- self,
- resource_group_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.FactoryListResponse"]
- """Lists factories.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_resource_group.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- if_match=None, # type: Optional[str]
- location=None, # type: Optional[str]
- tags=None, # type: Optional[Dict[str, str]]
- identity=None, # type: Optional["models.FactoryIdentity"]
- repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"]
- global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Creates or updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_match: ETag of the factory entity. Should only be specified for update, where it
- must match the existing entity or be * for an unconditional update.
- :type if_match: str
- :param location: The resource location.
- :type location: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :param global_parameters: List of parameters for factory.
- :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory, 'Factory')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
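
Because `create_or_update` assembles the `Factory` body from flattened keyword arguments, callers pass `location`, `tags`, and `identity` directly rather than building the model themselves. A sketch, where the `FactoryIdentity` type value `SystemAssigned` is an assumption about the 2018-06-01 models:

```python
# Sketch of a flattened create (identity type value is an assumption).
models = client.factories.models
factory = client.factories.create_or_update(
    resource_group_name="my-rg",
    factory_name="myfactory",
    location="eastus",
    tags={"env": "dev"},
    identity=models.FactoryIdentity(type="SystemAssigned"),
)
print(factory.id)  # ARM resource id of the created/updated factory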
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- tags=None, # type: Optional[Dict[str, str]]
- identity=None, # type: Optional["models.FactoryIdentity"]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.Factory"]
- """Gets a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * is provided, no content is returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def get_git_hub_access_token(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- git_hub_access_code, # type: str
- git_hub_access_token_base_url, # type: str
- git_hub_client_id=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.GitHubAccessTokenResponse"
- """Get GitHub Access Token.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param git_hub_access_code: GitHub access code.
- :type git_hub_access_code: str
- :param git_hub_access_token_base_url: GitHub access token base URL.
- :type git_hub_access_token_base_url: str
- :param git_hub_client_id: GitHub application client ID.
- :type git_hub_client_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: GitHubAccessTokenResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_git_hub_access_token.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore
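
A sketch of the deleted `get_git_hub_access_token`, which exchanges a GitHub OAuth code for a token the factory can use; all values are placeholders and the response attribute name is an assumption:

```python
# Sketch; all values are placeholders.
token_response = client.factories.get_git_hub_access_token(
    resource_group_name="my-rg",
    factory_name="myfactory",
    git_hub_access_code="<oauth-code-from-redirect>",
    git_hub_access_token_base_url="https://github.com",
    git_hub_client_id="<github-app-client-id>",  # optional
)
print(token_response.git_hub_access_token)  # attribute name assumed
```
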
-
- def get_data_plane_access(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- permissions=None, # type: Optional[str]
- access_resource_path=None, # type: Optional[str]
- profile_name=None, # type: Optional[str]
- start_time=None, # type: Optional[str]
- expire_time=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.AccessPolicyResponse"
- """Get Data Plane access.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param permissions: The string with permissions for Data Plane access. Currently only 'r' is
- supported, which grants read-only access.
- :type permissions: str
- :param access_resource_path: The resource path to get access to, relative to the factory.
- Currently only the empty string is supported, which corresponds to the factory resource.
- :type access_resource_path: str
- :param profile_name: The name of the profile. Currently only the default is supported. The
- default value is DefaultProfile.
- :type profile_name: str
- :param start_time: Start time for the token. If not specified, the current time is used.
- :type start_time: str
- :param expire_time: Expiration time for the token. The maximum duration is eight hours;
- by default the token expires in eight hours.
- :type expire_time: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccessPolicyResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AccessPolicyResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_data_plane_access.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(policy, 'UserAccessPolicy')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AccessPolicyResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore
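
Per its docstring, `get_data_plane_access` currently only supports `'r'` permissions against the empty resource path, with a token lifetime capped at eight hours. A sketch; the ISO 8601 timestamp format and the response attribute name are assumptions:

```python
# Sketch of requesting a short-lived read-only data-plane token.
policy_response = client.factories.get_data_plane_access(
    resource_group_name="my-rg",
    factory_name="myfactory",
    permissions="r",                       # only read access is supported
    access_resource_path="",               # empty string = the factory itself
    profile_name="DefaultProfile",
    start_time="2018-11-10T09:00:00Z",     # ISO 8601 assumed
    expire_time="2018-11-10T17:00:00Z",    # within the eight-hour cap
)
print(policy_response.access_token)        # attribute name assumed
```
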
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py
deleted file mode 100644
index a7903633080..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeNodeOperations(object):
- """IntegrationRuntimeNodeOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.SelfHostedIntegrationRuntimeNode"
- """Gets a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- concurrent_jobs_limit=None, # type: Optional[int]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.SelfHostedIntegrationRuntimeNode"
- """Updates a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration
- runtime node. Values between 1 and maxConcurrentJobs (inclusive) are allowed.
- :type concurrent_jobs_limit: int
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def get_ip_address(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeNodeIpAddress"
- """Get the IP address of self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_ip_address.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore
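For context, a minimal sketch of driving these node-level operations through the generated client. The client class name, constructor signature, and the integration_runtime_nodes attribute are assumptions based on the vendored SDK's naming conventions, not confirmed by this diff:

from azure.identity import DefaultAzureCredential
# Hypothetical top-level import; the vendored copy lives under
# azext_datafactory.vendored_sdks.datafactory.
from data_factory_management_client import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
nodes = client.integration_runtime_nodes  # assumed attribute name

# Cap concurrent jobs on one self-hosted IR node, then read its IP address.
node = nodes.update("my-rg", "my-factory", "my-shir", "Node_1", concurrent_jobs_limit=4)
ip = nodes.get_ip_address("my-rg", "my-factory", "my-shir", "Node_1")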
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py
deleted file mode 100644
index 1fb5fc6b30d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py
+++ /dev/null
@@ -1,1198 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.arm_polling import ARMPolling
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeOperations(object):
- """IntegrationRuntimeOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"]
- """Lists integration runtimes.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either IntegrationRuntimeListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore
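A paging sketch, reusing the client from above (the integration_runtimes attribute name is an assumption). ItemPaged follows the next_link continuation shown in extract_data transparently, so callers just iterate:

# Each element is an IntegrationRuntimeResource from the page's value list.
for ir in client.integration_runtimes.list_by_factory("my-rg", "my-factory"):
    print(ir.name, ir.properties.type)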
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- properties, # type: "models.IntegrationRuntime"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeResource"
- """Creates or updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
- :param if_match: ETag of the integration runtime entity. Should only be specified for update,
- for which it should match the existing entity or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- integration_runtime = models.IntegrationRuntimeResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
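Note that the operation wraps the given properties in an IntegrationRuntimeResource before serializing, so callers pass only the inner IntegrationRuntime model. A sketch under the same client assumptions; the model constructor arguments are taken from the public azure-mgmt-datafactory models and may differ slightly in this vendored copy:

from data_factory_management_client import models

# create_or_update wraps this in IntegrationRuntimeResource(properties=...) itself.
shir = models.SelfHostedIntegrationRuntime(description="demo runtime")
resource = client.integration_runtimes.create_or_update(
    "my-rg", "my-factory", "my-shir", properties=shir)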
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.IntegrationRuntimeResource"]
- """Gets an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param if_none_match: ETag of the integration runtime entity. Should only be specified for get.
- If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
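The If-None-Match header makes the get conditional: a 304 passes the status check above and the method returns None. A sketch (assuming the resource exposes its etag via the usual SubResource field):

resource = client.integration_runtimes.get("my-rg", "my-factory", "my-shir")
# Re-fetch only if the entity changed; an unchanged entity comes back as None (HTTP 304).
maybe_newer = client.integration_runtimes.get(
    "my-rg", "my-factory", "my-shir", if_none_match=resource.etag)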
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]]
- update_delay_offset=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeResource"
- """Updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
- runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
- :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
- :param update_delay_offset: The time offset (in hours) within the day, e.g., PT03H is 3 hours. The
- integration runtime auto-update will happen at that time.
- :type update_delay_offset: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
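A sketch of patching the auto-update settings, with values following the docstring above (an IntegrationRuntimeAutoUpdate value plus an ISO-8601 duration for the daily offset):

updated = client.integration_runtimes.update(
    "my-rg", "my-factory", "my-shir",
    auto_update="On",             # or models.IntegrationRuntimeAutoUpdate.ON
    update_delay_offset="PT03H",  # apply auto-updates at 03:00
)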
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- def get_status(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeStatusResponse"
- """Gets detailed status information for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore
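get_status, like get_connection_info just below, is a POST with no request body; only the response model differs. A sketch, reusing the assumed client from above:

status = client.integration_runtimes.get_status("my-rg", "my-factory", "my-shir")
# state is a read-only string on the status properties, e.g. Online or NeedRegistration.
print(status.name, status.properties.state)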
-
- def get_connection_info(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeConnectionInfo"
- """Gets the on-premises integration runtime connection information for encrypting the on-premises
- data source credentials.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeConnectionInfo, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_connection_info.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore
-
- def regenerate_auth_key(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeAuthKeys"
- """Regenerates the authentication key for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param key_name: The name of the authentication key to regenerate.
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.regenerate_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore
-
- def list_auth_key(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeAuthKeys"
- """Retrieves the authentication keys for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.list_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore
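The two auth-key operations pair naturally: list_auth_key reads the current keys, regenerate_auth_key rotates one. A sketch; key names follow the IntegrationRuntimeAuthKeyName values (authKey1, authKey2):

keys = client.integration_runtimes.list_auth_key("my-rg", "my-factory", "my-shir")
# Rotate authKey2 while nodes keep authenticating with authKey1.
keys = client.integration_runtimes.regenerate_auth_key(
    "my-rg", "my-factory", "my-shir", key_name="authKey2")
print(keys.auth_key2)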
-
- def _start_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.IntegrationRuntimeStatusResponse"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- def begin_start(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"]
- """Starts a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True:
-     polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = NoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- def _stop_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- def begin_stop(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Stops a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True:
-     polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False:
-     polling_method = NoPolling()
- else:
-     polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
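Both long-running operations follow the usual poller pattern: the _*_initial method issues the POST and the begin_* wrapper wires up ARMPolling. A sketch of the caller's side, under the same client assumptions:

poller = client.integration_runtimes.begin_start("my-rg", "my-factory", "my-managed-ir")
status = poller.result()  # blocks until the runtime has started
print(status.properties.state)

# begin_stop returns LROPoller[None]; result() just waits for completion.
client.integration_runtimes.begin_stop("my-rg", "my-factory", "my-managed-ir").result()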
-
- def sync_credentials(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Force the integration runtime to synchronize credentials across integration runtime nodes, and
- this will override the credentials across all worker nodes with those available on the
- dispatcher node. If you already have the latest credential backup file, you should manually
- import it (preferred) on any self-hosted integration runtime node than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.sync_credentials.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore
-
- def get_monitoring_data(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeMonitoringData"
- """Get the integration runtime monitoring data, which includes the monitor data for all the nodes
- under this integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
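A sketch of pulling per-node monitoring counters; sync_credentials above and upgrade below follow the same bodiless-POST shape. The nodes list and its attribute names are assumptions based on the IntegrationRuntimeMonitoringData model in the public SDK:

data = client.integration_runtimes.get_monitoring_data("my-rg", "my-factory", "my-shir")
for node in data.nodes or []:
    # per-node counters such as CPU utilization and available memory
    print(node.node_name, node.cpu_utilization)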
-
- def upgrade(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Upgrade self-hosted integration runtime to latest version if availability.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- def remove_link(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- linked_factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Remove all linked integration runtimes under specific data factory in a self-hosted integration
- runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.remove_link.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore
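Likewise for `remove_link`, which wraps the linked factory name in a `LinkedIntegrationRuntimeRequest` body before POSTing to `.../removeLinks` (client construction as in the sketch above):

```python
# `client` as constructed in the previous sketch; the operations-group
# attribute name is an assumption. A 200 response yields None.
client.integration_runtimes.remove_link(
    resource_group_name="my-rg",
    factory_name="sharedFactory",
    integration_runtime_name="sharedIr",
    linked_factory_name="consumerFactory",
)
```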
-
- def create_linked_integration_runtime(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- name=None, # type: Optional[str]
- subscription_id=None, # type: Optional[str]
- data_factory_name=None, # type: Optional[str]
- data_factory_location=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeStatusResponse"
- """Create a linked integration runtime entry in a shared integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param name: The name of the linked integration runtime.
- :type name: str
- :param subscription_id: The ID of the subscription that the linked integration runtime belongs
- to.
- :type subscription_id: str
- :param data_factory_name: The name of the data factory that the linked integration runtime
- belongs to.
- :type data_factory_name: str
- :param data_factory_location: The location of the data factory that the linked integration
- runtime belongs to.
- :type data_factory_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_linked_integration_runtime.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore
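A hedged sketch of the flattened body parameters above: all four are optional, get packed into a `CreateLinkedIntegrationRuntimeRequest`, and the call returns the deserialized `IntegrationRuntimeStatusResponse`.

```python
# `client` as in the first sketch above.
status = client.integration_runtimes.create_linked_integration_runtime(
    resource_group_name="my-rg",
    factory_name="sharedFactory",
    integration_runtime_name="sharedIr",
    name="linkedIr",
    subscription_id="00000000-0000-0000-0000-000000000000",
    data_factory_name="consumerFactory",
    data_factory_location="westus2",
)
print(status.name)  # IntegrationRuntimeStatusResponse carries the IR name
```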
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py
index d0a57313403..651865ae0ea 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py
@@ -465,6 +465,69 @@ def get_status(
return deserialized
get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore
+ def list_outbound_network_dependencies_endpoints(
+ self,
+ resource_group_name, # type: str
+ factory_name, # type: str
+ integration_runtime_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"
+ """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param factory_name: The factory name.
+ :type factory_name: str
+ :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response)
+ :rtype: ~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2018-06-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+ 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore
+
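The newly added operation can be exercised the same way; the payload attribute on the response model (`value`) is an assumption based on the usual ARM list-response convention:

```python
# `client` as in the first sketch; only meaningful for Azure-SSIS IRs.
resp = client.integration_runtimes.list_outbound_network_dependencies_endpoints(
    resource_group_name="my-rg",
    factory_name="myFactory",
    integration_runtime_name="mySsisIr",
)
for category in resp.value or []:  # `value` attribute assumed
    print(category)
```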
def get_connection_info(
self,
resource_group_name, # type: str
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py
deleted file mode 100644
index 7124cb588eb..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class LinkedServiceOperations(object):
- """LinkedServiceOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.LinkedServiceListResponse"]
- """Lists linked services.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore
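For reference, the paging protocol implemented above (`prepare_request` / `extract_data` / `get_next`) is consumed through `ItemPaged`, so callers simply iterate: each service round-trip deserializes one `LinkedServiceListResponse` and yields its `value` items.

```python
# `client` as in the first sketch; the `linked_services` attribute name
# is an assumption (the generated group name may well be singular).
for linked_service in client.linked_services.list_by_factory(
    resource_group_name="my-rg",
    factory_name="myFactory",
):
    print(linked_service.name)  # items are LinkedServiceResource objects
```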
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- properties, # type: "models.LinkedService"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.LinkedServiceResource"
- """Creates or updates a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param properties: Properties of linked service.
- :type properties: ~data_factory_management_client.models.LinkedService
- :param if_match: ETag of the linkedService entity. Should only be specified for update, for
- which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_service = models.LinkedServiceResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_service, 'LinkedServiceResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
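A sketch of the create path: `properties` must be a concrete `LinkedService` subtype, which the method then wraps in a `LinkedServiceResource`. `AzureStorageLinkedService` is one plausible subtype from the generated models; its name and field are assumptions, not confirmed by the code above.

```python
# `client` as in the first sketch; the models import path is assumed.
from azext_datafactory.vendored_sdks.datafactory import models

# Omit if_match when creating; pass a real ETag (or "*") only when
# updating an existing entity, per the docstring above.
ls = client.linked_services.create_or_update(
    resource_group_name="my-rg",
    factory_name="myFactory",
    linked_service_name="myStorageLs",
    properties=models.AzureStorageLinkedService(
        connection_string="DefaultEndpointsProtocol=https;AccountName=myacct;..."
    ),
)
print(ls.etag)  # LinkedServiceResource exposes the server-side ETag
```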
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.LinkedServiceResource"]
- """Gets a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param if_none_match: ETag of the linked service entity. Should only be specified for get. If
- the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
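The `if_none_match` plumbing above enables cheap change polling: HTTP 304 is treated as success, nothing is deserialized, and the method returns None rather than raising.

```python
# `client` as in the first sketch.
current = client.linked_services.get(
    resource_group_name="my-rg",
    factory_name="myFactory",
    linked_service_name="myStorageLs",
)
maybe_changed = client.linked_services.get(
    resource_group_name="my-rg",
    factory_name="myFactory",
    linked_service_name="myStorageLs",
    if_none_match=current.etag,
)
if maybe_changed is None:
    print("unchanged since last read (HTTP 304)")
```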
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py
deleted file mode 100644
index 29be0bd0e6d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedPrivateEndpointOperations(object):
- """ManagedPrivateEndpointOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"]
- """Lists managed private endpoints.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore
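Beyond plain iteration, `ItemPaged` also supports page-at-a-time consumption via `by_page()`, which surfaces the page boundaries that this method's `extract_data` creates — one `ManagedPrivateEndpointListResponse` per service round-trip.

```python
# `client` as in the first sketch; the operations-group attribute name
# is an assumption.
pager = client.managed_private_endpoints.list_by_factory(
    resource_group_name="my-rg",
    factory_name="myFactory",
    managed_virtual_network_name="default",
)
for page in pager.by_page():
    for endpoint in page:
        print(endpoint.name)
```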
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- if_match=None, # type: Optional[str]
- connection_state=None, # type: Optional["models.ConnectionStateProperties"]
- fqdns=None, # type: Optional[List[str]]
- group_id=None, # type: Optional[str]
- private_link_resource_id=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedPrivateEndpointResource"
- """Creates or updates a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_match: ETag of the managed private endpoint entity. Should only be specified for
- update, for which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :param connection_state: The managed private endpoint connection state.
- :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties
- :param fqdns: Fully qualified domain names.
- :type fqdns: list[str]
- :param group_id: The groupId for which the managed private endpoint is created.
- :type group_id: str
- :param private_link_resource_id: The ARM resource ID of the resource for which the managed
- private endpoint is created.
- :type private_link_resource_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
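A sketch of the flattened body parameters above, which the method packs into a `ManagedPrivateEndpointResource`. `group_id` and `private_link_resource_id` identify the target sub-resource; the storage-account resource ID below is an illustrative placeholder.

```python
# `client` as in the first sketch.
mpe = client.managed_private_endpoints.create_or_update(
    resource_group_name="my-rg",
    factory_name="myFactory",
    managed_virtual_network_name="default",
    managed_private_endpoint_name="myBlobEndpoint",
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000"
        "/resourceGroups/my-rg/providers/Microsoft.Storage"
        "/storageAccounts/myacct"
    ),
)
print(mpe.name)
```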
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedPrivateEndpointResource"
- """Gets a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py
deleted file mode 100644
index fa043ca3e59..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedVirtualNetworkOperations(object):
- """ManagedVirtualNetworkOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"]
- """Lists managed Virtual Networks.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- properties, # type: "models.ManagedVirtualNetwork"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedVirtualNetworkResource"
- """Creates or updates a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param properties: Managed Virtual Network properties.
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork
- :param if_match: ETag of the managed Virtual Network entity. Should only be specified for
- update, for which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
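A final sketch for the managed Virtual Network path. `ManagedVirtualNetwork` exposes no writable required fields at this API version (its `vnet_id` and `alias` are server-populated), so an empty properties object suffices; naming the network "default" follows the Data Factory service convention, which nothing in this client code enforces.

```python
# `client` and `models` as in the earlier sketches.
mvnet = client.managed_virtual_networks.create_or_update(
    resource_group_name="my-rg",
    factory_name="myFactory",
    managed_virtual_network_name="default",
    properties=models.ManagedVirtualNetwork(),
)
print(mvnet.name)
```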
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedVirtualNetworkResource"
- """Gets a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py
deleted file mode 100644
index c5cf3d43f6d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class OperationOperations(object):
- """OperationOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.OperationListResponse"]
- """Lists the available Azure Data Factory API operations.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either OperationListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('OperationListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore
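
All of the list operations removed in this diff share the same three-closure paging contract that feeds azure.core's ItemPaged: prepare_request builds the GET (query parameters only on the first page), extract_data returns (next_link, items), and get_next runs the pipeline. A dependency-free sketch of the loop ItemPaged drives, with fake in-memory pages standing in for HTTP responses:

# Fake pages keyed by next_link; the real get_next issues a GET here.
_PAGES = {
    None: {"value": [1, 2], "nextLink": "page2"},
    "page2": {"value": [3], "nextLink": None},
}

def get_next(next_link=None):
    return _PAGES[next_link]

def extract_data(page):
    # Mirrors the generated closure: (next_link_or_None, iterator_of_items).
    return page["nextLink"], iter(page["value"])

def iterate(get_next, extract_data):
    # What ItemPaged(get_next, extract_data) does internally.
    link, done = None, False
    while not done:
        link, items = extract_data(get_next(link))
        for item in items:
            yield item
        done = link is None

assert list(iterate(get_next, extract_data)) == [1, 2, 3]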
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py
deleted file mode 100644
index d82f423f2cb..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py
+++ /dev/null
@@ -1,414 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineOperations(object):
- """PipelineOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.PipelineListResponse"]
- """Lists pipelines.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either PipelineListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('PipelineListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- pipeline, # type: "models.PipelineResource"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineResource"
- """Creates or updates a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param pipeline: Pipeline resource definition.
- :type pipeline: ~data_factory_management_client.models.PipelineResource
- :param if_match: ETag of the pipeline entity. Should only be specified for update, for which
- it should match the existing entity, or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(pipeline, 'PipelineResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.PipelineResource"]
- """Gets a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
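# Sketch (not part of the generated file): get/create_or_update above
# implement optimistic concurrency via ETags. A typical round-trip,
# assuming `ops` is an instance of this class:
#
#     resource = ops.get(rg, factory, "pl1")            # 200 plus ETag
#     resource.properties.description = "updated"
#     ops.create_or_update(rg, factory, "pl1", resource,
#                          if_match=resource.etag)      # raises HttpResponseError
#                                                       # if the ETag is stale
#     ops.get(rg, factory, "pl1",
#             if_none_match=resource.etag)              # 304 -> returns None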
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- def create_run(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- reference_pipeline_run_id=None, # type: Optional[str]
- is_recovery=None, # type: Optional[bool]
- start_activity_name=None, # type: Optional[str]
- start_from_failure=None, # type: Optional[bool]
- parameters=None, # type: Optional[Dict[str, object]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.CreateRunResponse"
- """Creates a run of a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param reference_pipeline_run_id: The pipeline run identifier. If a run ID is specified, the
- parameters of the specified run will be used to create a new run.
- :type reference_pipeline_run_id: str
- :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified
- referenced pipeline run and the new run will be grouped under the same groupId.
- :type is_recovery: bool
- :param start_activity_name: In recovery mode, the rerun will start from this activity. If not
- specified, all activities will run.
- :type start_activity_name: str
- :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed
- activities. The property will be used only if startActivityName is not specified.
- :type start_from_failure: bool
- :param parameters: Parameters of the pipeline run. These parameters will be used only if
- referencePipelineRunId is not specified.
- :type parameters: dict[str, object]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: CreateRunResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.CreateRunResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if reference_pipeline_run_id is not None:
- query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str')
- if is_recovery is not None:
- query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool')
- if start_activity_name is not None:
- query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str')
- if start_from_failure is not None:
- query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if parameters is not None:
- body_content = self._serialize.body(parameters, '{object}')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('CreateRunResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore
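
Stripped of the pipeline plumbing, create_run above amounts to a single POST. A hedged sketch of the equivalent raw request using requests (URL, query names, and api-version are copied from the deleted code; the credential and IDs are placeholders, and the real client layers auth/retry policies on top):

import requests

def create_run(token, subscription, rg, factory, pipeline, parameters=None):
    url = ("https://management.azure.com/subscriptions/%s/resourceGroups/%s"
           "/providers/Microsoft.DataFactory/factories/%s/pipelines/%s"
           "/createRun" % (subscription, rg, factory, pipeline))
    resp = requests.post(
        url,
        params={"api-version": "2018-06-01"},
        headers={"Authorization": "Bearer " + token},
        json=parameters,  # body is used only when no reference run id is given
    )
    resp.raise_for_status()
    return resp.json()["runId"]  # CreateRunResponse carries the run id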
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py
deleted file mode 100644
index 75634fde5ac..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineRunOperations(object):
- """PipelineRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineRunsQueryResponse"
- """Query pipeline runs in the factory based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineRun"
- """Get a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRun, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRun
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRun', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore
-
- def cancel(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- is_recursive=None, # type: Optional[bool]
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Cancel a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param is_recursive: If true, cancel all the child pipelines that are triggered by the current
- pipeline.
- :type is_recursive: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- if is_recursive is not None:
- query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool')
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore
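
query_by_factory above folds its keyword arguments into a RunFilterParameters body before posting to queryPipelineRuns. A sketch of the equivalent JSON payload (wire-format field names assumed from the public Data Factory REST API, not taken from this diff):

import datetime
import json

window_end = datetime.datetime(2021, 1, 2)
body = {
    "lastUpdatedAfter": "2021-01-01T00:00:00Z",   # ISO 8601, required
    "lastUpdatedBefore": window_end.strftime("%Y-%m-%dT%H:%M:%SZ"),
    "filters": [                                  # RunQueryFilter entries
        {"operand": "Status", "operator": "Equals", "values": ["Failed"]}
    ],
    "orderBy": [                                  # RunQueryOrderBy entries
        {"orderBy": "RunEnd", "order": "DESC"}
    ],
}
print(json.dumps(body, indent=2))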
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py
deleted file mode 100644
index 142f32f2c31..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py
+++ /dev/null
@@ -1,895 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.arm_polling import ARMPolling
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerOperations(object):
- """TriggerOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.TriggerListResponse"]
- """Lists triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either TriggerListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('TriggerListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- continuation_token_parameter=None, # type: Optional[str]
- parent_trigger_name=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerQueryResponse"
- """Query triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun
- triggers.
- :type parent_trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- properties, # type: "models.Trigger"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerResource"
- """Creates or updates a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param properties: Properties of the trigger.
- :type properties: ~data_factory_management_client.models.Trigger
- :param if_match: ETag of the trigger entity. Should only be specified for update, for which it
- should match the existing entity, or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- trigger = models.TriggerResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(trigger, 'TriggerResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerResource"]
- """Gets a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def _subscribe_to_event_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._subscribe_to_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- def begin_subscribe_to_event(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"]
- """Subscribe event trigger to events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for a custom polling strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._subscribe_to_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
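Editor's note: `begin_subscribe_to_event` above is a standard azure-core long-running operation. A minimal usage sketch follows; the package and class names (`data_factory_management_client`, `DataFactoryManagementClient`), the `triggers` operation-group attribute, and the credential setup are assumptions inferred from the docstring cross-references, not part of this diff.

```python
from azure.identity import DefaultAzureCredential

# Assumption: the vendored SDK exposes a track-2 style client under the
# `data_factory_management_client` namespace referenced in the docstrings above.
from data_factory_management_client import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# Kick off the LRO and block for the terminal status.
poller = client.triggers.begin_subscribe_to_event(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
)
status = poller.result()  # TriggerSubscriptionOperationStatus

# As documented above, a saved continuation token can resume a poller later.
token = poller.continuation_token()
resumed = client.triggers.begin_subscribe_to_event(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
    continuation_token=token,
)
```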
-
- def get_event_subscription_status(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerSubscriptionOperationStatus"
- """Get a trigger's event subscription status.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerSubscriptionOperationStatus, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_event_subscription_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore
-
- def _unsubscribe_from_event_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- def begin_unsubscribe_from_event(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"]
- """Unsubscribe event trigger from events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._unsubscribe_from_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- def _start_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- def begin_start(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Starts a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- def _stop_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
-
- def begin_stop(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Stops a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
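Editor's note: `begin_start` and `begin_stop` return `LROPoller[None]`, so `.result()` only blocks until the trigger reaches the requested state and yields no payload. A sketch under the same client assumptions as the earlier editor's note:

```python
# Sketch only: `client` as constructed in the earlier editor's note; the
# `triggers` attribute name is an assumption.
client.triggers.begin_start(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
).result()  # returns None; raises HttpResponseError on failure

client.triggers.begin_stop(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
    polling_interval=10,  # seconds between polls when no Retry-After header is sent
).result()
```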
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py
deleted file mode 100644
index 3290d8196ab..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerRunOperations(object):
- """TriggerRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def rerun(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Rerun single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.rerun.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore
-
- def cancel(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Cancel a single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerRunsQueryResponse"
- """Query trigger runs.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy option.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore
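Editor's note: the `TriggerRunOperations` methods deleted above back the `az datafactory trigger-run rerun/cancel/query-by-factory` commands documented later in this diff. A usage sketch against the raw operation group, under the same client assumptions as the earlier notes; the `value` attribute on the query response is an assumption based on the usual ARM list shape, and the run id is a placeholder.

```python
import datetime

# Sketch only: `client.trigger_runs` is assumed to be the operation group above.
runs = client.trigger_runs.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    last_updated_after=datetime.datetime(2018, 6, 16, 0, 36, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2018, 6, 16, 0, 49, tzinfo=datetime.timezone.utc),
)
for run in runs.value:  # assumed list attribute on TriggerRunsQueryResponse
    print(run)

# Rerun (or cancel) a single trigger instance by its run id.
client.trigger_runs.rerun(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
    run_id="exampleRunId",
)
```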
diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip
deleted file mode 100644
index 296cd2dfd07..00000000000
Binary files a/src/datafactory/gen.zip and /dev/null differ
diff --git a/src/datafactory/linter_exclusions.yml b/src/datafactory/linter_exclusions.yml
deleted file mode 100644
index cdfa831be54..00000000000
--- a/src/datafactory/linter_exclusions.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-datafactory get-git-hub-access-token:
- parameters:
- git_hub_access_token_base_url:
- rule_exclusions:
- - option_length_too_long
diff --git a/src/datafactory/report.md b/src/datafactory/report.md
index 1d9bdfb4cf1..0f562e875d9 100644
--- a/src/datafactory/report.md
+++ b/src/datafactory/report.md
@@ -19,6 +19,9 @@
|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)|
|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)|
|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)|
+|az datafactory private-end-point-connection|privateEndPointConnections|[commands](#CommandsInprivateEndPointConnections)|
+|az datafactory private-endpoint-connection|PrivateEndpointConnection|[commands](#CommandsInPrivateEndpointConnection)|
+|az datafactory private-link-resource|privateLinkResources|[commands](#CommandsInprivateLinkResources)|
## COMMANDS
### Commands in `az datafactory` group
@@ -62,6 +65,7 @@
|[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)|
|[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)|
|[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)|
+|[az datafactory integration-runtime list-outbound-network-dependency-endpoint](#IntegrationRuntimesListOutboundNetworkDependenciesEndpoints)|ListOutboundNetworkDependenciesEndpoints|[Parameters](#ParametersIntegrationRuntimesListOutboundNetworkDependenciesEndpoints)|[Example](#ExamplesIntegrationRuntimesListOutboundNetworkDependenciesEndpoints)|
|[az datafactory integration-runtime regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)|
|[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)|
|[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)|
@@ -103,6 +107,24 @@
|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)|
|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)|
+### Commands in `az datafactory private-end-point-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-end-point-connection list](#privateEndPointConnectionsListByFactory)|ListByFactory|[Parameters](#ParametersprivateEndPointConnectionsListByFactory)|[Example](#ExamplesprivateEndPointConnectionsListByFactory)|
+
+### Commands in `az datafactory private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-endpoint-connection show](#PrivateEndpointConnectionGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionGet)|[Example](#ExamplesPrivateEndpointConnectionGet)|
+|[az datafactory private-endpoint-connection create](#PrivateEndpointConnectionCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionCreateOrUpdate#Create)|
+|[az datafactory private-endpoint-connection update](#PrivateEndpointConnectionCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Update)|Not Found|
+|[az datafactory private-endpoint-connection delete](#PrivateEndpointConnectionDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionDelete)|[Example](#ExamplesPrivateEndpointConnectionDelete)|
+
+### Commands in `az datafactory private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-link-resource show](#privateLinkResourcesGet)|Get|[Parameters](#ParametersprivateLinkResourcesGet)|[Example](#ExamplesprivateLinkResourcesGet)|
+
### Commands in `az datafactory trigger` group
|CLI Command|Operation Swagger name|Parameters|Examples|
|---------|------------|--------|-----------|
@@ -176,9 +198,15 @@ az datafactory create --location "East US" --name "exampleFactoryName" --resourc
|**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
|**--location**|string|The resource location.|location|location|
|**--tags**|dictionary|The resource tags.|tags|tags|
-|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration|
-|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration|
+|**--repo-configuration**|object|Git repo information of the factory.|repo_configuration|repoConfiguration|
|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters|
+|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess|
+|**--key-name**|string|The name of the key in Azure Key Vault to use as the Customer Managed Key.|key_name|keyName|
+|**--vault-base-url**|string|The URL of the Azure Key Vault used for CMK.|vault_base_url|vaultBaseUrl|
+|**--key-version**|string|The version of the key used for CMK. If not provided, the latest version will be used.|key_version|keyVersion|
+|**--identity**|object|User-assigned identity used to authenticate to the customer's key vault. If not provided, Managed Service Identity will be used.|identity|identity|
+|**--type**|choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities|
#### Command `az datafactory update`
@@ -193,6 +221,8 @@ az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValu
|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
|**--factory-name**|string|The factory name.|factory_name|factoryName|
|**--tags**|dictionary|The resource tags.|tags|tags|
+|**--type**|choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities|
#### Command `az datafactory delete`
@@ -211,17 +241,17 @@ az datafactory delete --name "exampleFactoryName" --resource-group "exampleResou
##### Example
```
az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourc\
-eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \
---factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \
-repository-name="repo" root-folder="/" tenant-id="" --location "East US"
+eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" --repo-configuration \
+"{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collaborationBranch\\":\\"master\\",\\"lastC\
+ommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\":\\"repo\\",\\"rootFolder\\":\\"/\\",\\"tenantId\
+\\":\\"\\"}" --location "East US"
```
##### Parameters
|Option|Type|Description|Path (SDK)|Swagger name|
|------|----|-----------|----------|------------|
|**--location**|string|The location identifier.|location|locationId|
|**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId|
-|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration|
-|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration|
+|**--repo-configuration**|object|Git repo information of the factory.|repo_configuration|repoConfiguration|
#### Command `az datafactory get-data-plane-access`
@@ -257,6 +287,7 @@ az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-ac
|**--git-hub-access-code**|string|GitHub access code.|git_hub_access_code|gitHubAccessCode|
|**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl|
|**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId|
+|**--git-hub-client-secret**|object|GitHub bring-your-own-app client secret information.|git_hub_client_secret|gitHubClientSecret|
### group `az datafactory activity-run`
#### Command `az datafactory activity-run query-by-pipeline-run`
@@ -423,6 +454,7 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa
|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName|
|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
|**--description**|string|Integration runtime description.|managed_description|description|
+|**--managed-virtual-network**|object|Managed Virtual Network reference.|managed_managed_virtual_network|managedVirtualNetwork|
|**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties|
|**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties|
@@ -529,6 +561,20 @@ az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryN
|**--factory-name**|string|The factory name.|factory_name|factoryName|
|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName|
+#### Command `az datafactory integration-runtime list-outbound-network-dependency-endpoint`
+
+##### Example
+```
+az datafactory integration-runtime list-outbound-network-dependency-endpoint --factory-name "exampleFactoryName" \
+--name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName|
+
#### Command `az datafactory integration-runtime regenerate-auth-key`
##### Example
@@ -823,7 +869,7 @@ et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"M
"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\
obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\
OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \
---name "examplePipeline" --resource-group "exampleResourceGroup"
+--pipeline-name "examplePipeline" --resource-group "exampleResourceGroup"
```
##### Parameters
|Option|Type|Description|Path (SDK)|Swagger name|
@@ -840,7 +886,7 @@ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\
|**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations|
|**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions|
|**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration|
-|**--folder-name**|string|The name of the folder that this Pipeline is in.|folder_name|name|
+|**--name**|string|The name of the folder that this Pipeline is in.|name|name|
#### Command `az datafactory pipeline delete`
@@ -924,6 +970,91 @@ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:3
|**--filters**|array|List of filters.|filters|filters|
|**--order-by**|array|List of OrderBy option.|order_by|orderBy|
+### group `az datafactory private-end-point-connection`
+#### Command `az datafactory private-end-point-connection list`
+
+##### Example
+```
+az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
+### group `az datafactory private-endpoint-connection`
+#### Command `az datafactory private-endpoint-connection show`
+
+##### Example
+```
+az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-none-match**|string|ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
+
+#### Command `az datafactory private-endpoint-connection create`
+
+##### Example
+```
+az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name "connection" \
+--private-link-service-connection-state description="Approved by admin." actions-required="" status="Approved" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+#### Command `az datafactory private-endpoint-connection update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
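Editor's note: the command table above lists "Not Found" for the `update` example; a plausible invocation, mirroring the `create` example and using illustrative values, would be:

```
az datafactory private-endpoint-connection update --factory-name "exampleFactoryName" --name "connection" \
--private-link-service-connection-state description="Rejected by admin." actions-required="" status="Rejected" \
--resource-group "exampleResourceGroup"
```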
+#### Command `az datafactory private-endpoint-connection delete`
+
+##### Example
+```
+az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+
+### group `az datafactory private-link-resource`
+#### Command `az datafactory private-link-resource show`
+
+##### Example
+```
+az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
### group `az datafactory trigger`
#### Command `az datafactory trigger list`