diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py
index e82a36f58589..504bd080f542 100644
--- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py
+++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models.py
@@ -299,6 +299,10 @@ class BigDataPoolResourceInfo(TrackedResource):
     :type node_count: int
     :param library_requirements: Library version requirements
     :type library_requirements: ~azure.mgmt.synapse.models.LibraryRequirements
+    :param spark_config_properties: Spark configuration file to specify
+     additional properties
+    :type spark_config_properties:
+     ~azure.mgmt.synapse.models.LibraryRequirements
     :param spark_version: The Apache Spark version.
     :type spark_version: str
     :param default_spark_log_folder: The default folder where Spark logs will
@@ -334,6 +338,7 @@ class BigDataPoolResourceInfo(TrackedResource):
         'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'},
         'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
         'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'},
+        'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'},
         'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'},
         'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'},
         'node_size': {'key': 'properties.nodeSize', 'type': 'str'},
@@ -350,6 +355,7 @@ def __init__(self, **kwargs):
         self.spark_events_folder = kwargs.get('spark_events_folder', None)
         self.node_count = kwargs.get('node_count', None)
         self.library_requirements = kwargs.get('library_requirements', None)
+        self.spark_config_properties = kwargs.get('spark_config_properties', None)
         self.spark_version = kwargs.get('spark_version', None)
         self.default_spark_log_folder = kwargs.get('default_spark_log_folder', None)
         self.node_size = kwargs.get('node_size', None)
diff --git a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py
index c4f72918ef88..a8ea15ebbf88 100644
--- a/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py
+++ b/sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/models/_models_py3.py
@@ -299,6 +299,10 @@ class BigDataPoolResourceInfo(TrackedResource):
     :type node_count: int
     :param library_requirements: Library version requirements
     :type library_requirements: ~azure.mgmt.synapse.models.LibraryRequirements
+    :param spark_config_properties: Spark configuration file to specify
+     additional properties
+    :type spark_config_properties:
+     ~azure.mgmt.synapse.models.LibraryRequirements
     :param spark_version: The Apache Spark version.
     :type spark_version: str
     :param default_spark_log_folder: The default folder where Spark logs will
@@ -334,13 +338,14 @@ class BigDataPoolResourceInfo(TrackedResource):
         'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'},
         'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
         'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'},
+        'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'},
         'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'},
         'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'},
         'node_size': {'key': 'properties.nodeSize', 'type': 'str'},
         'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'},
     }

-    def __init__(self, *, location: str, tags=None, provisioning_state: str=None, auto_scale=None, creation_date=None, auto_pause=None, is_compute_isolation_enabled: bool=None, spark_events_folder: str=None, node_count: int=None, library_requirements=None, spark_version: str=None, default_spark_log_folder: str=None, node_size=None, node_size_family=None, **kwargs) -> None:
+    def __init__(self, *, location: str, tags=None, provisioning_state: str=None, auto_scale=None, creation_date=None, auto_pause=None, is_compute_isolation_enabled: bool=None, spark_events_folder: str=None, node_count: int=None, library_requirements=None, spark_config_properties=None, spark_version: str=None, default_spark_log_folder: str=None, node_size=None, node_size_family=None, **kwargs) -> None:
         super(BigDataPoolResourceInfo, self).__init__(tags=tags, location=location, **kwargs)
         self.provisioning_state = provisioning_state
         self.auto_scale = auto_scale
@@ -350,6 +355,7 @@ def __init__(self, *, location: str, tags=None, provisioning_state: str=None, au
         self.spark_events_folder = spark_events_folder
         self.node_count = node_count
         self.library_requirements = library_requirements
+        self.spark_config_properties = spark_config_properties
         self.spark_version = spark_version
         self.default_spark_log_folder = default_spark_log_folder
         self.node_size = node_size
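
For context on the change above: the diff threads a new optional spark_config_properties field through BigDataPoolResourceInfo in both generated model files (the kwargs-based _models.py and the keyword-only _models_py3.py), serialized as properties.sparkConfigProperties. Per the _attribute_map entries added above, the field reuses the LibraryRequirements model rather than introducing a dedicated type. A minimal usage sketch follows; the location, Spark version, node settings, and config file contents are illustrative placeholders, not part of this diff, and it assumes LibraryRequirements exposes content and filename parameters as in this SDK version.

    from azure.mgmt.synapse.models import BigDataPoolResourceInfo, LibraryRequirements

    # sparkConfigProperties reuses the LibraryRequirements shape here:
    # `content` carries the raw Spark config file text, `filename` its name.
    # (Assumed signature; hedged against this SDK version's generated model.)
    spark_config = LibraryRequirements(
        content="spark.dynamicAllocation.enabled true\n",
        filename="spark_config.txt",
    )

    # Illustrative pool definition; all values below are placeholders.
    pool = BigDataPoolResourceInfo(
        location="eastus",
        spark_version="2.4",
        node_count=3,
        spark_config_properties=spark_config,
    )

When such a model is serialized for a create/update call, the new field lands under properties.sparkConfigProperties in the request body, mirroring the _attribute_map key added in both files.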