From 2e1b30fd7891979c691bc3fc4368fbcc10aa2dff Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Mon, 14 Mar 2022 05:00:23 +0000 Subject: [PATCH] CodeGen from PR 18153 in Azure/azure-rest-api-specs update blob partition number limitation description (#18153) Co-authored-by: Zhengyi Zhao --- .../azure-mgmt-streamanalytics/_meta.json | 2 +- .../azure/mgmt/streamanalytics/_version.py | 2 +- .../mgmt/streamanalytics/models/__init__.py | 2 + .../streamanalytics/models/_models_py3.py | 146 ++++++++++++++++-- 4 files changed, 140 insertions(+), 12 deletions(-) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json index 9ecc2a71e6454..d822e04720f8f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -4,7 +4,7 @@ "@autorest/python@5.12.0", "@autorest/modelerfour@4.19.3" ], - "commit": "0ca8399b0e4fb4ae4608ffd0a6c056213664dacd", + "commit": "f7f8f5bd19939b4a11ea626f266a362b4dd5b626", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --python3-only --track2 --use=@autorest/python@5.12.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", "readme": "specification/streamanalytics/resource-manager/readme.md" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index c47f66669f1bf..e5754a47ce68f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "1.0.0b1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index cc323de0b32e8..eec84ec2ef0dc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -9,6 +9,7 @@ from ._models_py3 import AvroSerialization from ._models_py3 import AzureDataLakeStoreOutputDataSource from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties +from ._models_py3 import AzureFunctionOutputDataSource from ._models_py3 import AzureMachineLearningWebServiceFunctionBinding from ._models_py3 import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import AzureMachineLearningWebServiceInputColumn @@ -131,6 +132,7 @@ 'AvroSerialization', 'AzureDataLakeStoreOutputDataSource', 'AzureDataLakeStoreOutputDataSourceProperties', + 'AzureFunctionOutputDataSource', 'AzureMachineLearningWebServiceFunctionBinding', 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', 'AzureMachineLearningWebServiceInputColumn', diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index 49628eb32af44..fb562727eed6c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -94,7 +94,7 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. All required parameters must be populated in order to send to Azure. 
@@ -112,7 +112,7 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} } def __init__( @@ -416,6 +416,78 @@ def __init__( self.authentication_mode = authentication_mode +class AzureFunctionOutputDataSource(OutputDataSource): + """Defines the metadata of AzureFunctionOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Indicates the type of data source output will be written to. Required on + PUT (CreateOrReplace) requests.Constant filled by server. + :vartype type: str + :ivar function_app_name: The name of your Azure Functions app. + :vartype function_app_name: str + :ivar function_name: The name of the function in your Azure Functions app. + :vartype function_name: str + :ivar api_key: If you want to use an Azure Function from another subscription, you can do so by + providing the key to access your function. + :vartype api_key: str + :ivar max_batch_size: A property that lets you set the maximum size for each output batch + that's sent to your Azure function. The input unit is in bytes. By default, this value is + 262,144 bytes (256 KB). + :vartype max_batch_size: float + :ivar max_batch_count: A property that lets you specify the maximum number of events in each + batch that's sent to Azure Functions. The default value is 100. 
+ :vartype max_batch_count: float + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, + 'function_name': {'key': 'properties.functionName', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + } + + def __init__( + self, + *, + function_app_name: Optional[str] = None, + function_name: Optional[str] = None, + api_key: Optional[str] = None, + max_batch_size: Optional[float] = None, + max_batch_count: Optional[float] = None, + **kwargs + ): + """ + :keyword function_app_name: The name of your Azure Functions app. + :paramtype function_app_name: str + :keyword function_name: The name of the function in your Azure Functions app. + :paramtype function_name: str + :keyword api_key: If you want to use an Azure Function from another subscription, you can do so + by providing the key to access your function. + :paramtype api_key: str + :keyword max_batch_size: A property that lets you set the maximum size for each output batch + that's sent to your Azure function. The input unit is in bytes. By default, this value is + 262,144 bytes (256 KB). + :paramtype max_batch_size: float + :keyword max_batch_count: A property that lets you specify the maximum number of events in each + batch that's sent to Azure Functions. The default value is 100. + :paramtype max_batch_count: float + """ + super(AzureFunctionOutputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.AzureFunction' # type: str + self.function_app_name = function_app_name + self.function_name = function_name + self.api_key = api_key + self.max_batch_size = max_batch_size + self.max_batch_count = max_batch_count + + class FunctionBinding(msrest.serialization.Model): """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. @@ -1452,6 +1524,10 @@ class BlobDataSourceProperties(msrest.serialization.Model): :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -1460,6 +1536,7 @@ class BlobDataSourceProperties(msrest.serialization.Model): 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, 'date_format': {'key': 'dateFormat', 'type': 'str'}, 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, } def __init__( @@ -1470,6 +1547,7 @@ def __init__( path_pattern: Optional[str] = None, date_format: Optional[str] = None, time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): """ @@ -1493,6 +1571,10 @@ def __init__( :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ super(BlobDataSourceProperties, self).__init__(**kwargs) self.storage_accounts = storage_accounts @@ -1500,6 +1582,7 @@ def __init__( self.path_pattern = path_pattern self.date_format = date_format self.time_format = time_format + self.authentication_mode = authentication_mode class BlobOutputDataSource(OutputDataSource): @@ -1672,8 +1755,7 @@ def __init__( :paramtype authentication_mode: str or ~stream_analytics_management_client.models.AuthenticationMode """ - super(BlobOutputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) - self.authentication_mode = authentication_mode + super(BlobOutputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, authentication_mode=authentication_mode, **kwargs) class BlobReferenceInputDataSource(ReferenceInputDataSource): @@ -1704,6 +1786,10 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _validation = { @@ -1717,6 +1803,7 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, } def __init__( @@ -1727,6 +1814,7 @@ def __init__( path_pattern: Optional[str] = None, date_format: Optional[str] = None, time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): """ @@ -1750,6 +1838,10 @@ def __init__( :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ super(BlobReferenceInputDataSource, self).__init__(**kwargs) self.type = 'Microsoft.Storage/Blob' # type: str @@ -1758,6 +1850,7 @@ def __init__( self.path_pattern = path_pattern self.date_format = date_format self.time_format = time_format + self.authentication_mode = authentication_mode class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): @@ -1783,6 +1876,10 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :vartype time_format: str + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ _attribute_map = { @@ -1791,6 +1888,7 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, 'date_format': {'key': 'dateFormat', 'type': 'str'}, 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, } def __init__( @@ -1801,6 +1899,7 @@ def __init__( path_pattern: Optional[str] = None, date_format: Optional[str] = None, time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, **kwargs ): """ @@ -1824,8 +1923,12 @@ def __init__( :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode """ - super(BlobReferenceInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) + super(BlobReferenceInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, authentication_mode=authentication_mode, **kwargs) class StreamInputDataSource(msrest.serialization.Model): @@ -1891,7 +1994,12 @@ class BlobStreamInputDataSource(StreamInputDataSource): :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :vartype time_format: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar source_partition_count: The partition count of the blob input data source. Range 1 - + 1024. :vartype source_partition_count: int """ @@ -1906,6 +2014,7 @@ class BlobStreamInputDataSource(StreamInputDataSource): 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'}, } @@ -1917,6 +2026,7 @@ def __init__( path_pattern: Optional[str] = None, date_format: Optional[str] = None, time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, source_partition_count: Optional[int] = None, **kwargs ): @@ -1941,8 +2051,12 @@ def __init__( :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". 
+ :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. + 1024. :paramtype source_partition_count: int """ super(BlobStreamInputDataSource, self).__init__(**kwargs) @@ -1952,6 +2066,7 @@ def __init__( self.path_pattern = path_pattern self.date_format = date_format self.time_format = time_format + self.authentication_mode = authentication_mode self.source_partition_count = source_partition_count @@ -1978,7 +2093,12 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :vartype time_format: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. + :ivar authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :vartype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode + :ivar source_partition_count: The partition count of the blob input data source. Range 1 - + 1024. :vartype source_partition_count: int """ @@ -1988,6 +2108,7 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, 'date_format': {'key': 'dateFormat', 'type': 'str'}, 'time_format': {'key': 'timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, 'source_partition_count': {'key': 'sourcePartitionCount', 'type': 'int'}, } @@ -1999,6 +2120,7 @@ def __init__( path_pattern: Optional[str] = None, date_format: Optional[str] = None, time_format: Optional[str] = None, + authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, source_partition_count: Optional[int] = None, **kwargs ): @@ -2023,11 +2145,15 @@ def __init__( :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead. :paramtype time_format: str + :keyword authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", + "ConnectionString". + :paramtype authentication_mode: str or + ~stream_analytics_management_client.models.AuthenticationMode :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. + 1024. :paramtype source_partition_count: int """ - super(BlobStreamInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, **kwargs) + super(BlobStreamInputDataSourceProperties, self).__init__(storage_accounts=storage_accounts, container=container, path_pattern=path_pattern, date_format=date_format, time_format=time_format, authentication_mode=authentication_mode, **kwargs) self.source_partition_count = source_partition_count
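
For reviewers, a minimal usage sketch of the surface this patch touches, assuming the package builds as generated above. The function app name, function name, key, storage account name, container, and path pattern below are hypothetical placeholders, and the `StorageAccount(account_name=...)` call assumes the existing `StorageAccount` model already shipped in this SDK; nothing in the snippet is introduced by the patch itself.

```python
# Minimal sketch (not part of the generated patch): constructing the
# AzureFunctionOutputDataSource added by this change and a BlobStreamInputDataSource
# using the newly exposed authentication_mode property. All names, keys, and values
# are illustrative placeholders.
from azure.mgmt.streamanalytics.models import (
    AzureFunctionOutputDataSource,
    BlobStreamInputDataSource,
    StorageAccount,
)

# New output sink registered under the 'Microsoft.AzureFunction' type by this patch.
function_output = AzureFunctionOutputDataSource(
    function_app_name="my-function-app",   # hypothetical Azure Functions app
    function_name="ProcessEvents",         # hypothetical function in that app
    api_key="<function-key>",              # only needed for a function in another subscription
    max_batch_size=262144,                 # bytes; 256 KB is the documented default
    max_batch_count=100,                   # events per batch; documented default
)

# Blob stream input now accepts authentication_mode ("Msi", "UserToken",
# "ConnectionString"), and source_partition_count is documented as range 1 - 1024.
blob_input = BlobStreamInputDataSource(
    storage_accounts=[StorageAccount(account_name="mystorageacct")],
    container="input-events",
    path_pattern="cluster1/{date}/{time}",
    date_format="yyyy/MM/dd",
    time_format="HH",
    authentication_mode="Msi",
    source_partition_count=16,
)

assert function_output.type == "Microsoft.AzureFunction"
assert blob_input.authentication_mode == "Msi"
```

In practice these datasource objects would be attached to the SDK's Input/Output resource models and sent through the management client's create-or-replace operations; that wiring is unchanged by this patch and is omitted here.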