16 changes: 16 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -57,8 +57,11 @@
from .operation_service_specification import OperationServiceSpecification
from .operation import Operation
from .operation_list_response import OperationListResponse
+ from .azure_databricks_linked_service import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
+ from .netezza_linked_service import NetezzaLinkedService
+ from .vertica_linked_service import VerticaLinkedService
from .zoho_linked_service import ZohoLinkedService
from .xero_linked_service import XeroLinkedService
from .square_linked_service import SquareLinkedService
@@ -126,6 +129,8 @@
from .sql_server_linked_service import SqlServerLinkedService
from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
from .azure_storage_linked_service import AzureStorageLinkedService
+ from .vertica_table_dataset import VerticaTableDataset
+ from .netezza_table_dataset import NetezzaTableDataset
from .zoho_object_dataset import ZohoObjectDataset
from .xero_object_dataset import XeroObjectDataset
from .square_object_dataset import SquareObjectDataset
@@ -195,6 +200,7 @@
from .schedule_trigger import ScheduleTrigger
from .multiple_pipeline_trigger import MultiplePipelineTrigger
from .activity_policy import ActivityPolicy
+ from .databricks_notebook_activity import DatabricksNotebookActivity
from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
from .azure_ml_web_service_file import AzureMLWebServiceFile
@@ -204,6 +210,8 @@
from .web_activity import WebActivity
from .redshift_unload_settings import RedshiftUnloadSettings
from .amazon_redshift_source import AmazonRedshiftSource
+ from .vertica_source import VerticaSource
+ from .netezza_source import NetezzaSource
from .zoho_source import ZohoSource
from .xero_source import XeroSource
from .square_source import SquareSource
@@ -433,8 +441,11 @@
'OperationServiceSpecification',
'Operation',
'OperationListResponse',
+ 'AzureDatabricksLinkedService',
'AzureDataLakeAnalyticsLinkedService',
'HDInsightOnDemandLinkedService',
+ 'NetezzaLinkedService',
+ 'VerticaLinkedService',
'ZohoLinkedService',
'XeroLinkedService',
'SquareLinkedService',
@@ -502,6 +513,8 @@
'SqlServerLinkedService',
'AzureSqlDWLinkedService',
'AzureStorageLinkedService',
+ 'VerticaTableDataset',
+ 'NetezzaTableDataset',
'ZohoObjectDataset',
'XeroObjectDataset',
'SquareObjectDataset',
@@ -571,6 +584,7 @@
'ScheduleTrigger',
'MultiplePipelineTrigger',
'ActivityPolicy',
+ 'DatabricksNotebookActivity',
'DataLakeAnalyticsUSQLActivity',
'AzureMLUpdateResourceActivity',
'AzureMLWebServiceFile',
@@ -580,6 +594,8 @@
'WebActivity',
'RedshiftUnloadSettings',
'AmazonRedshiftSource',
+ 'VerticaSource',
+ 'NetezzaSource',
'ZohoSource',
'XeroSource',
'SquareSource',
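The regenerated `__init__.py` both imports and re-exports the new models, so they are available straight from the package namespace. A quick smoke check, assuming a build of azure-mgmt-datafactory that includes this change:

```python
# All of the models added in this diff should now be importable in one place.
from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService,
    DatabricksNotebookActivity,
    NetezzaLinkedService,
    NetezzaSource,
    NetezzaTableDataset,
    VerticaLinkedService,
    VerticaSource,
    VerticaTableDataset,
)
```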
99 changes: 99 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
@@ -0,0 +1,99 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .linked_service import LinkedService


class AzureDatabricksLinkedService(LinkedService):
"""Azure Databricks linked service.

:param additional_properties: Unmatched properties from the message are
deserialized to this collection
:type additional_properties: dict[str, object]
:param connect_via: The integration runtime reference.
:type connect_via:
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
linked service.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param domain: <REGION>.azuredatabricks.net, domain name of your
Databricks deployment. Type: string (or Expression with resultType
string).
:type domain: object
:param access_token: Access token for the Databricks REST API. Refer to
https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
string (or Expression with resultType string).
:type access_token: ~azure.mgmt.datafactory.models.SecretBase
:param existing_cluster_id: The id of an existing cluster that will be
used for all runs of this job. Type: string (or Expression with resultType
string).
:type existing_cluster_id: object
:param new_cluster_version: The Spark version of new cluster. Type: string
(or Expression with resultType string).
:type new_cluster_version: object
:param new_cluster_num_of_worker: Number of worker nodes that the new
cluster should have. A string-formatted Int32: '1' means a fixed cluster of
one worker, and '1:10' means auto-scale from a minimum of 1 to a maximum of
10 workers. Type: string (or Expression with resultType string).
:type new_cluster_num_of_worker: object
:param new_cluster_node_type: The node types of new cluster. Type: string
(or Expression with resultType string).
:type new_cluster_node_type: object
:param new_cluster_spark_conf: A set of optional, user-specified Spark
configuration key-value pairs.
:type new_cluster_spark_conf: dict[str, object]
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object
"""

_validation = {
'type': {'required': True},
'domain': {'required': True},
'access_token': {'required': True},
}

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'domain': {'key': 'typeProperties.domain', 'type': 'object'},
'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'},
'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'},
'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'},
'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'},
'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, domain, access_token, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, encrypted_credential=None):
super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.domain = domain
self.access_token = access_token
self.existing_cluster_id = existing_cluster_id
self.new_cluster_version = new_cluster_version
self.new_cluster_num_of_worker = new_cluster_num_of_worker
self.new_cluster_node_type = new_cluster_node_type
self.new_cluster_spark_conf = new_cluster_spark_conf
self.encrypted_credential = encrypted_credential
self.type = 'AzureDatabricks'
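For context, a construction sketch (not part of this PR) pointing the linked service at a new job cluster. The domain, token, and cluster settings below are placeholder values, and `SecureString` is the SDK's inline `SecretBase` implementation; an `AzureKeyVaultSecretReference` would also satisfy the `access_token` type:

```python
from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService,
    SecureString,
)

databricks_ls = AzureDatabricksLinkedService(
    # <REGION>.azuredatabricks.net, per the docstring above.
    domain='eastus.azuredatabricks.net',
    # Required secret; SecureString keeps it inline in the factory payload.
    access_token=SecureString(value='<databricks-access-token>'),
    # Leaving existing_cluster_id unset and filling new_cluster_* instead
    # asks the service to spin up a job cluster per run.
    new_cluster_version='4.0.x-scala2.11',   # placeholder runtime version
    new_cluster_num_of_worker='1:10',        # auto-scale between 1 and 10
    new_cluster_node_type='Standard_D3_v2',  # placeholder node type
)
```

The `self.type = 'AzureDatabricks'` assignment is the discriminator that routes this object through `LinkedService`'s subtype map during serialization and deserialization.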
16 changes: 8 additions & 8 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
@@ -16,13 +16,13 @@ class CopySource(Model):
"""A copy activity source.

You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AmazonRedshiftSource, ZohoSource, XeroSource,
- SquareSource, SparkSource, ShopifySource, ServiceNowSource,
- QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource,
- MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource,
- HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource,
- EloquaSource, DrillSource, CouchbaseSource, ConcurSource,
- AzurePostgreSqlSource, AmazonMWSSource, HttpSource,
+ sub-classes are: AmazonRedshiftSource, VerticaSource, NetezzaSource,
+ ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource,
+ ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource,
+ PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource,
+ ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource,
+ GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource,
+ ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource,
AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource,
OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource,
SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource,
@@ -55,7 +55,7 @@
}

_subtype_map = {
- 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'}
+ 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'}
}

def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None):
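The `_subtype_map` entry is what lets msrest resolve a plain `CopySource` payload to the right subclass from its `type` discriminator. A minimal sketch of that mechanism, assuming msrest and the regenerated models are installed:

```python
from msrest import Deserializer

from azure.mgmt.datafactory import models

# Registry of model classes that msrest consults for polymorphic types.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

# The 'type' value is looked up in CopySource._subtype_map.
payload = {'type': 'VerticaSource', 'query': 'SELECT 1'}
source = deserialize('CopySource', payload)
print(type(source).__name__)  # VerticaSource
```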
66 changes: 66 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
@@ -0,0 +1,66 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .execution_activity import ExecutionActivity


class DatabricksNotebookActivity(ExecutionActivity):
"""DatabricksNotebook activity.

:param additional_properties: Unmatched properties from the message are
deserialized to this collection
:type additional_properties: dict[str, object]
:param name: Activity name.
:type name: str
:param description: Activity description.
:type description: str
:param depends_on: Activity depends on condition.
:type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
:param type: Constant filled by server.
:type type: str
:param linked_service_name: Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
:param policy: Activity policy.
:type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
:param notebook_path: The absolute path of the notebook to be run in the
Databricks Workspace. This path must begin with a slash. Type: string (or
Expression with resultType string).
:type notebook_path: object
:param base_parameters: Base parameters to be used for each run of this
job. If the notebook takes a parameter that is not specified, the default
value from the notebook will be used.
:type base_parameters: dict[str, object]
"""

_validation = {
'name': {'required': True},
'type': {'required': True},
'notebook_path': {'required': True},
}

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
'type': {'key': 'type', 'type': 'str'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
}

def __init__(self, name, notebook_path, additional_properties=None, description=None, depends_on=None, linked_service_name=None, policy=None, base_parameters=None):
super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, linked_service_name=linked_service_name, policy=policy)
self.notebook_path = notebook_path
self.base_parameters = base_parameters
self.type = 'DatabricksNotebook'
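And a usage sketch for the new activity (again illustrative, not part of the PR): the names, paths, and parameters below are placeholders, and the linked service reference is assumed to point at an `AzureDatabricksLinkedService` already defined in the factory.

```python
from azure.mgmt.datafactory.models import (
    DatabricksNotebookActivity,
    LinkedServiceReference,
    PipelineResource,
)

notebook_activity = DatabricksNotebookActivity(
    name='RunMyNotebook',
    # Absolute workspace path; must begin with a slash.
    notebook_path='/Users/someone@example.com/my-notebook',
    linked_service_name=LinkedServiceReference(reference_name='AzureDatabricksLS'),
    # Overrides the notebook's widget defaults for this run.
    base_parameters={'inputPath': '/mnt/raw', 'runDate': '2018-03-01'},
)

pipeline = PipelineResource(activities=[notebook_activity])
# With an authenticated DataFactoryManagementClient `adf_client`:
# adf_client.pipelines.create_or_update(
#     'my-resource-group', 'my-factory', 'DatabricksPipeline', pipeline)
```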