diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
index 3b0e7eea9256..675b5a5e72c0 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
@@ -1872,21 +1872,9 @@ def _start_copy_from_url_options(self, source_url, metadata=None, incremental_co
         tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None)
         requires_sync = kwargs.pop('requires_sync', None)
-        encryption_scope_str = kwargs.pop('encryption_scope', None)
         source_authorization = kwargs.pop('source_authorization', None)
-
-        if not requires_sync and encryption_scope_str:
-            raise ValueError("Encryption_scope is only supported for sync copy, please specify requires_sync=True")
         if source_authorization and incremental_copy:
             raise ValueError("Source authorization tokens are not applicable for incremental copying.")
 
-        #
-        # TODO: refactor start_copy_from_url api in _blob_client.py. Call _generated/_blob_operations.py copy_from_url
-        # when requires_sync=True is set.
-        # Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url.
-        # As sync copy diverges more from async copy, more problem will surface.
-        if encryption_scope_str:
-            headers.update({'x-ms-encryption-scope': encryption_scope_str})
-
         if requires_sync is True:
             headers['x-ms-requires-sync'] = str(requires_sync)
         if source_authorization:
@@ -2071,17 +2059,6 @@ def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False,
             Authenticate as a service principal using a client secret to access a source blob.
             Ensure "bearer " is the prefix of the source_authorization string.
             This option is only available when `incremental_copy` is set to False
            and `requires_sync` is set to True.
-
-            .. versionadded:: 12.9.0
-
-        :keyword str encryption_scope:
-            A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption
-            scope can be created using the Management API and referenced here by name. If a default
-            encryption scope has been defined at the container, this value will override it if the
-            container-level scope is configured to allow overrides. Otherwise an error will be raised.
-
-            .. versionadded:: 12.10.0
-
         :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status).
         :rtype: dict[str, Union[str, ~datetime.datetime]]
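For orientation, the public surface that remains after this hunk is exercised roughly as follows. A minimal sketch, not taken from the PR; the connection string, container, and blob names are hypothetical, and `encryption_scope` no longer appears among the accepted keywords:

    from azure.storage.blob import BlobClient

    dest = BlobClient.from_connection_string(
        "<connection-string>", container_name="dst-container", blob_name="copy.bin"
    )
    props = dest.start_copy_from_url(
        "https://myaccount.blob.core.windows.net/src-container/source.bin",
        requires_sync=True,                     # emits the x-ms-requires-sync header
        source_authorization="bearer <token>",  # per the docstring, prefixed with "bearer "
    )
    print(props["copy_status"])                 # a sync copy completes before the call returns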
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
index ce8d4369abdc..1c0d9c86c093 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py
@@ -38,7 +38,7 @@ def __init__(
         super(AzureBlobStorageConfiguration, self).__init__(**kwargs)
 
         self.url = url
-        self.version = "2020-12-06"
+        self.version = "2020-10-02"
         kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION))
         self._configure(**kwargs)
 
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
index c42bf9977c7d..bb5c7494306b 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py
@@ -33,7 +33,7 @@ def __init__(
         super(AzureBlobStorageConfiguration, self).__init__(**kwargs)
 
         self.url = url
-        self.version = "2020-12-06"
+        self.version = "2020-10-02"
         kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION))
         self._configure(**kwargs)
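The pinned `version` above is what every operation stamps into the `x-ms-version` request header (see the `header_parameters['x-ms-version']` lines throughout this diff). Callers can pin the same service version on the public client; a small sketch with a hypothetical account URL and credential:

    from azure.storage.blob import BlobServiceClient

    service = BlobServiceClient(
        "https://myaccount.blob.core.windows.net",  # hypothetical account URL
        credential="<account-key>",                 # hypothetical credential
        api_version="2020-10-02",                   # the default this diff pins
    )
    print(service.api_version)                      # -> "2020-10-02"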
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
index 46a5ad21ca32..e8ab38248ae5 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py
@@ -2139,7 +2139,6 @@ async def copy_from_url(
         source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None,
         modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
         lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
-        cpk_scope_info: Optional["_models.CpkScopeInfo"] = None,
         **kwargs: Any
     ) -> None:
         """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not
@@ -2189,8 +2188,6 @@ async def copy_from_url(
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
         :param lease_access_conditions: Parameter group.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_scope_info: Parameter group.
-        :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
         :keyword callable cls: A custom type or function that will be passed the direct response
         :return: None, or the result of cls(response)
         :rtype: None
@@ -2212,9 +2209,6 @@ async def copy_from_url(
         _if_none_match = None
         _if_tags = None
         _lease_id = None
-        _encryption_scope = None
-        if cpk_scope_info is not None:
-            _encryption_scope = cpk_scope_info.encryption_scope
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
         if modified_access_conditions is not None:
@@ -2286,8 +2280,6 @@ async def copy_from_url(
             header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool')
         if copy_source_authorization is not None:
             header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
         header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
 
         request = self._client.put(url, query_parameters, header_parameters)
@@ -2311,7 +2303,6 @@ async def copy_from_url(
         response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
         response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
         response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
 
         if cls:
             return cls(pipeline_response, None, response_headers)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
index 17f58d532644..9fe6a1901633 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py
@@ -829,6 +829,7 @@ async def submit_batch(
         content_type = kwargs.pop("content_type", "application/xml")
         accept = "application/xml"
 
+        multipart_content_type = kwargs.pop("content_type", None)
         # Construct URL
         url = self.submit_batch.metadata['url']  # type: ignore
         path_format_arguments = {
@@ -854,7 +855,7 @@ async def submit_batch(
         header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
 
         body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(body, 'IO', is_xml=True)
+        body_content = self._serialize.body(body, 'IO')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
         pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
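`submit_batch` itself is internal; it is normally driven by the public batch helpers, which serialize the sub-requests into one multipart/mixed body. A minimal async sketch, with hypothetical connection string, container, and blob names:

    import asyncio
    from azure.storage.blob.aio import ContainerClient

    async def delete_batch() -> None:
        async with ContainerClient.from_connection_string(
            "<connection-string>", "my-container"
        ) as container:
            # The three deletes travel as a single batch request through submit_batch.
            await container.delete_blobs("blob-1", "blob-2", "blob-3")

    asyncio.run(delete_batch())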
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_directory_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_directory_operations.py
new file mode 100644
index 000000000000..12e49a1e7bd9
--- /dev/null
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_directory_operations.py
@@ -0,0 +1,742 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DirectoryOperations:
+    """DirectoryOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.storage.blob.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def create(
+        self,
+        timeout: Optional[int] = None,
+        directory_properties: Optional[str] = None,
+        posix_permissions: Optional[str] = None,
+        posix_umask: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None,
+        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
+        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
+        **kwargs: Any
+    ) -> None:
+        """Create a directory. By default, the destination is overwritten and if the destination already
+        exists and has a lease the lease is broken. This operation supports conditional HTTP requests.
+        For more information, see `Specifying Conditional Headers for Blob Service Operations `_.
+        To fail if the destination already exists, use a conditional request with If-None-Match: "*".
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param directory_properties: Optional. User-defined properties to be stored with the file or
+         directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...",
+         where each value is base64 encoded.
+        :type directory_properties: str
+        :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the
+         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
+         Each class may be granted read, write, or execute permission. The sticky bit is also
+         supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
+        :type posix_permissions: str
+        :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask
+         restricts the permission settings for the file and directory, and is applied only when a
+         default ACL does not exist in the parent directory. If a umask bit is set, the corresponding
+         permission is disabled; otherwise the corresponding permission is determined by the
+         permissions parameter. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was
+         specified, a default umask of 0027 will be used.
+        :type posix_umask: str
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param directory_http_headers: Parameter group.
+        :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _cache_control = None
+        _content_type = None
+        _content_encoding = None
+        _content_language = None
+        _content_disposition = None
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        if directory_http_headers is not None:
+            _cache_control = directory_http_headers.cache_control
+            _content_type = directory_http_headers.content_type
+            _content_encoding = directory_http_headers.content_encoding
+            _content_language = directory_http_headers.content_language
+            _content_disposition = directory_http_headers.content_disposition
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+        resource = "directory"
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['resource'] = self._serialize.query("resource", resource, 'str')
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if directory_properties is not None:
+            header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str')
+        if posix_permissions is not None:
+            header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
+        if posix_umask is not None:
+            header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str')
+        if _cache_control is not None:
+            header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
+        if _content_type is not None:
+            header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
+        if _content_encoding is not None:
+            header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
+        if _content_language is not None:
+            header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
+        if _content_disposition is not None:
+            header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
+        if _lease_id is not None:
+            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
+        if _if_modified_since is not None:
+            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
+        if _if_unmodified_since is not None:
+            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
+        if _if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
+        if _if_none_match is not None:
+            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
+        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+        if request_id_parameter is not None:
+            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    create.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
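The interaction between `x-ms-permissions` and `x-ms-umask` described in the docstring above reduces to standard POSIX mode arithmetic when the parent directory carries no default ACL: every bit set in the umask is cleared from the requested permissions. A small illustration, not part of the generated code:

    requested = 0o777           # value sent in x-ms-permissions
    umask = 0o027               # value sent in x-ms-umask (the documented default)
    effective = requested & ~umask
    print(oct(effective))       # 0o750, i.e. rwxr-x---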
+
+    async def rename(
+        self,
+        rename_source: str,
+        timeout: Optional[int] = None,
+        marker: Optional[str] = None,
+        path_rename_mode: Optional[Union[str, "_models.PathRenameMode"]] = None,
+        directory_properties: Optional[str] = None,
+        posix_permissions: Optional[str] = None,
+        posix_umask: Optional[str] = None,
+        source_lease_id: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        directory_http_headers: Optional["_models.DirectoryHttpHeaders"] = None,
+        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
+        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
+        source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None,
+        **kwargs: Any
+    ) -> None:
+        """Rename a directory. By default, the destination is overwritten and if the destination already
+        exists and has a lease the lease is broken. This operation supports conditional HTTP requests.
+        For more information, see `Specifying Conditional Headers for Blob Service Operations `_.
+        To fail if the destination already exists, use a conditional request with If-None-Match: "*".
+
+        :param rename_source: The file or directory to be renamed. The value must have the following
+         format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties will
+         overwrite the existing properties; otherwise, the existing properties will be preserved.
+        :type rename_source: str
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param marker: When renaming a directory, the number of paths that are renamed with each
+         invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation
+         token is returned in this response header. When a continuation token is returned in the
+         response, it must be specified in a subsequent invocation of the rename operation to continue
+         renaming the directory.
+        :type marker: str
+        :param path_rename_mode: Determines the behavior of the rename operation.
+        :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode
+        :param directory_properties: Optional. User-defined properties to be stored with the file or
+         directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...",
+         where each value is base64 encoded.
+        :type directory_properties: str
+        :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the
+         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
+         Each class may be granted read, write, or execute permission. The sticky bit is also
+         supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
+        :type posix_permissions: str
+        :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask
+         restricts the permission settings for the file and directory, and is applied only when a
+         default ACL does not exist in the parent directory. If a umask bit is set, the corresponding
+         permission is disabled; otherwise the corresponding permission is determined by the
+         permissions parameter. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was
+         specified, a default umask of 0027 will be used.
+        :type posix_umask: str
+        :param source_lease_id: A lease ID for the source path. If specified, the source path must have
+         an active lease and the lease ID must match.
+        :type source_lease_id: str
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param directory_http_headers: Parameter group.
+        :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :param source_modified_access_conditions: Parameter group.
+        :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _cache_control = None
+        _content_type = None
+        _content_encoding = None
+        _content_language = None
+        _content_disposition = None
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        _source_if_modified_since = None
+        _source_if_unmodified_since = None
+        _source_if_match = None
+        _source_if_none_match = None
+        if directory_http_headers is not None:
+            _cache_control = directory_http_headers.cache_control
+            _content_type = directory_http_headers.content_type
+            _content_encoding = directory_http_headers.content_encoding
+            _content_language = directory_http_headers.content_language
+            _content_disposition = directory_http_headers.content_disposition
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+        if source_modified_access_conditions is not None:
+            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
+            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
+            _source_if_match = source_modified_access_conditions.source_if_match
+            _source_if_none_match = source_modified_access_conditions.source_if_none_match
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.rename.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+        if marker is not None:
+            query_parameters['continuation'] = self._serialize.query("marker", marker, 'str')
+        if path_rename_mode is not None:
+            query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
+        if directory_properties is not None:
+            header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str')
+        if posix_permissions is not None:
+            header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
+        if posix_umask is not None:
+            header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str')
+        if _cache_control is not None:
+            header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
+        if _content_type is not None:
+            header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
+        if _content_encoding is not None:
+            header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
+        if _content_language is not None:
+            header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
+        if _content_disposition is not None:
+            header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
+        if _lease_id is not None:
+            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
+        if source_lease_id is not None:
+            header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
+        if _if_modified_since is not None:
+            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
+        if _if_unmodified_since is not None:
+            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
+        if _if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
+        if _if_none_match is not None:
+            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
+        if _source_if_modified_since is not None:
+            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
+        if _source_if_unmodified_since is not None:
+            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
+        if _source_if_match is not None:
+            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
+        if _source_if_none_match is not None:
+            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
+        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+        if request_id_parameter is not None:
+            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    rename.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
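The marker/continuation contract in the rename docstring implies a loop on the caller's side. A sketch under the assumption that `directory_ops` is an already-constructed `DirectoryOperations` instance (the class is internal and is normally attached to the client for you); the `cls` callback is used here only to capture the response headers:

    async def rename_until_done(directory_ops, source: str) -> None:
        marker = None
        while True:
            headers = await directory_ops.rename(
                rename_source=source,
                marker=marker,
                cls=lambda pipeline_response, deserialized, hdrs: hdrs,
            )
            marker = headers.get("x-ms-continuation")
            if not marker:      # no continuation token: the rename is complete
                break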
+
+    async def delete(
+        self,
+        recursive_directory_delete: bool,
+        timeout: Optional[int] = None,
+        marker: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
+        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
+        **kwargs: Any
+    ) -> None:
+        """Deletes the directory.
+
+        :param recursive_directory_delete: If "true", all paths beneath the directory will be deleted.
+         If "false" and the directory is non-empty, an error occurs.
+        :type recursive_directory_delete: bool
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param marker: When deleting a directory, the number of paths that are deleted with each
+         invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation
+         token is returned in this response header. When a continuation token is returned in the
+         response, it must be specified in a subsequent invocation of the delete operation to continue
+         deleting the directory.
+        :type marker: str
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+        query_parameters['recursive'] = self._serialize.query("recursive_directory_delete", recursive_directory_delete, 'bool')
+        if marker is not None:
+            query_parameters['continuation'] = self._serialize.query("marker", marker, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if _lease_id is not None:
+            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
+        if _if_modified_since is not None:
+            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
+        if _if_unmodified_since is not None:
+            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
+        if _if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
+        if _if_none_match is not None:
+            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
+        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+        if request_id_parameter is not None:
+            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    delete.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
+
+    async def set_access_control(
+        self,
+        timeout: Optional[int] = None,
+        owner: Optional[str] = None,
+        group: Optional[str] = None,
+        posix_permissions: Optional[str] = None,
+        posix_acl: Optional[str] = None,
+        request_id_parameter: Optional[str] = None,
+        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
+        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
+        **kwargs: Any
+    ) -> None:
+        """Set the owner, group, permissions, or access control list for a directory.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param owner: Optional. The owner of the blob or directory.
+        :type owner: str
+        :param group: Optional. The owning group of the blob or directory.
+        :type group: str
+        :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the
+         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
+         Each class may be granted read, write, or execute permission. The sticky bit is also
+         supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
+        :type posix_permissions: str
+        :param posix_acl: Sets POSIX access control rights on files and directories. The value is a
+         comma-separated list of access control entries. Each access control entry (ACE) consists of a
+         scope, a type, a user or group identifier, and permissions in the format
+         "[scope:][type]:[id]:[permissions]".
+        :type posix_acl: str
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _if_match = None
+        _if_none_match = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+        action = "setAccessControl"
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.set_access_control.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['action'] = self._serialize.query("action", action, 'str')
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if _lease_id is not None:
+            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
+        if owner is not None:
+            header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
+        if group is not None:
+            header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
+        if posix_permissions is not None:
+            header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str')
+        if posix_acl is not None:
+            header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str')
+        if _if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
+        if _if_none_match is not None:
+            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
+        if _if_modified_since is not None:
+            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
+        if _if_unmodified_since is not None:
+            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
+        if request_id_parameter is not None:
+            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.patch(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_access_control.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
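The `[scope:][type]:[id]:[permissions]` format that `posix_acl` expects, spelled out. An empty id targets the owning user or group, and a `default:` scope applies the entry to children created later; the named-user object ID below is hypothetical:

    posix_acl = (
        "user::rwx,"                                      # owning user
        "user:11111111-1111-1111-1111-111111111111:r-x,"  # a named user, by object ID
        "group::r-x,"                                     # owning group
        "other::---,"                                     # everyone else
        "default:user::rwx"                               # inherited by new children
    )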
+
+    async def get_access_control(
+        self,
+        timeout: Optional[int] = None,
+        upn: Optional[bool] = None,
+        request_id_parameter: Optional[str] = None,
+        lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None,
+        modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None,
+        **kwargs: Any
+    ) -> None:
+        """Get the owner, group, permissions, or access control list for a directory.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If
+         "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response
+         headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If
+         "false", the values will be returned as Azure Active Directory Object IDs. The default value is
+         false.
+        :type upn: bool
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _if_match = None
+        _if_none_match = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+        action = "getAccessControl"
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.get_access_control.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['action'] = self._serialize.query("action", action, 'str')
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+        if upn is not None:
+            query_parameters['upn'] = self._serialize.query("upn", upn, 'bool')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if _lease_id is not None:
+            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
+        if _if_match is not None:
+            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
+        if _if_none_match is not None:
+            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
+        if _if_modified_since is not None:
+            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
+        if _if_unmodified_since is not None:
+            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
+        if request_id_parameter is not None:
+            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
+        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.head(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner'))
+        response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group'))
+        response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions'))
+        response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    get_access_control.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
index a6592a33f7f2..6c6dab8040b2 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py
@@ -551,6 +551,7 @@ async def submit_batch(
         content_type = kwargs.pop("content_type", "application/xml")
         accept = "application/xml"
 
+        multipart_content_type = kwargs.pop("content_type", None)
         # Construct URL
         url = self.submit_batch.metadata['url']  # type: ignore
         path_format_arguments = {
@@ -575,7 +576,7 @@ async def submit_batch(
         header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
 
         body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(body, 'IO', is_xml=True)
+        body_content = self._serialize.body(body, 'IO')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
         pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
index 638b6316fdc7..2597f6f970be 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
@@ -175,7 +175,6 @@ class ListContainersIncludeType(with_metaclass(_CaseInsensitiveEnumMeta, str, En
 
     METADATA = "metadata"
     DELETED = "deleted"
-    SYSTEM = "system"
 
 
 class PremiumPageBlobAccessTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
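`ListContainersIncludeType` backs the `include=` values sent when listing containers; with `system` removed, `metadata` and `deleted` remain. On the public client these surface as keyword flags; a minimal sketch with a hypothetical connection string:

    from azure.storage.blob import BlobServiceClient

    service = BlobServiceClient.from_connection_string("<connection-string>")
    for container in service.list_containers(include_metadata=True, include_deleted=True):
        print(container.name, container.deleted)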
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
index e041221239fa..a7d8341159dc 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
@@ -2160,7 +2160,6 @@ def copy_from_url(
         source_modified_access_conditions=None,  # type: Optional["_models.SourceModifiedAccessConditions"]
         modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
         lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
-        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
         **kwargs  # type: Any
     ):
         # type: (...) -> None
@@ -2211,8 +2210,6 @@ def copy_from_url(
         :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
         :param lease_access_conditions: Parameter group.
         :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
-        :param cpk_scope_info: Parameter group.
-        :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
         :keyword callable cls: A custom type or function that will be passed the direct response
         :return: None, or the result of cls(response)
         :rtype: None
@@ -2234,9 +2231,6 @@ def copy_from_url(
         _if_none_match = None
         _if_tags = None
         _lease_id = None
-        _encryption_scope = None
-        if cpk_scope_info is not None:
-            _encryption_scope = cpk_scope_info.encryption_scope
         if lease_access_conditions is not None:
             _lease_id = lease_access_conditions.lease_id
         if modified_access_conditions is not None:
@@ -2308,8 +2302,6 @@ def copy_from_url(
             header_parameters['x-ms-legal-hold'] = self._serialize.header("legal_hold", legal_hold, 'bool')
         if copy_source_authorization is not None:
             header_parameters['x-ms-copy-source-authorization'] = self._serialize.header("copy_source_authorization", copy_source_authorization, 'str')
-        if _encryption_scope is not None:
-            header_parameters['x-ms-encryption-scope'] = self._serialize.header("encryption_scope", _encryption_scope, 'str')
         header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
 
         request = self._client.put(url, query_parameters, header_parameters)
@@ -2333,7 +2325,6 @@ def copy_from_url(
         response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
         response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
         response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
-        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
 
         if cls:
             return cls(pipeline_response, None, response_headers)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
index 1fdd911abb21..108c1163b922 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
@@ -842,6 +842,7 @@ def submit_batch(
         content_type = kwargs.pop("content_type", "application/xml")
         accept = "application/xml"
 
+        multipart_content_type = kwargs.pop("content_type", None)
         # Construct URL
         url = self.submit_batch.metadata['url']  # type: ignore
         path_format_arguments = {
@@ -867,7 +868,7 @@ def submit_batch(
         header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
 
         body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(body, 'IO', is_xml=True)
+        body_content = self._serialize.body(body, 'IO')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
         pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_directory_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_directory_operations.py
new file mode 100644
index 000000000000..0ebb32d05eed
--- /dev/null
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_directory_operations.py
@@ -0,0 +1,751 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DirectoryOperations(object):
+    """DirectoryOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.storage.blob.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def create(
+        self,
+        timeout=None,  # type: Optional[int]
+        directory_properties=None,  # type: Optional[str]
+        posix_permissions=None,  # type: Optional[str]
+        posix_umask=None,  # type: Optional[str]
+        request_id_parameter=None,  # type: Optional[str]
+        directory_http_headers=None,  # type: Optional["_models.DirectoryHttpHeaders"]
+        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Create a directory. By default, the destination is overwritten and if the destination already
+        exists and has a lease the lease is broken. This operation supports conditional HTTP requests.
+        For more information, see `Specifying Conditional Headers for Blob Service Operations `_.
+        To fail if the destination already exists, use a conditional request with If-None-Match: "*".
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`.
+        :type timeout: int
+        :param directory_properties: Optional. User-defined properties to be stored with the file or
+         directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...",
+         where each value is base64 encoded.
+        :type directory_properties: str
+        :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the
+         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
+         Each class may be granted read, write, or execute permission. The sticky bit is also
+         supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
+        :type posix_permissions: str
+        :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask
+         restricts the permission settings for the file and directory, and is applied only when a
+         default ACL does not exist in the parent directory. If a umask bit is set, the corresponding
+         permission is disabled; otherwise the corresponding permission is determined by the
+         permissions parameter. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was
+         specified, a default umask of 0027 will be used.
+        :type posix_umask: str
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled.
+        :type request_id_parameter: str
+        :param directory_http_headers: Parameter group.
+        :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders
+        :param lease_access_conditions: Parameter group.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _cache_control = None
+        _content_type = None
+        _content_encoding = None
+        _content_language = None
+        _content_disposition = None
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        if directory_http_headers is not None:
+            _cache_control = directory_http_headers.cache_control
+            _content_type = directory_http_headers.content_type
+            _content_encoding = directory_http_headers.content_encoding
+            _content_language = directory_http_headers.content_language
+            _content_disposition = directory_http_headers.content_disposition
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+        resource = "directory"
+        accept = "application/xml"
+
+        # Construct URL
+        url = self.create.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['resource'] = self._serialize.query("resource", resource, 'str')
+        if timeout is not None:
+            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        if directory_properties
is not None: + header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') + if posix_permissions is not None: + header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') + if posix_umask is not None: + header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') + if _cache_control is not None: + header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') + if _content_type is not None: + header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') + if _content_encoding is not None: + header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') + if _content_language is not None: + header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') + if _content_disposition is not None: + header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + + def rename( + self, + 
rename_source, # type: str + timeout=None, # type: Optional[int] + marker=None, # type: Optional[str] + path_rename_mode=None, # type: Optional[Union[str, "_models.PathRenameMode"]] + directory_properties=None, # type: Optional[str] + posix_permissions=None, # type: Optional[str] + posix_umask=None, # type: Optional[str] + source_lease_id=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + directory_http_headers=None, # type: Optional["_models.DirectoryHttpHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Rename a directory. By default, the destination is overwritten and if the destination already + exists and has a lease the lease is broken. This operation supports conditional HTTP requests. + For more information, see `Specifying Conditional Headers for Blob Service Operations + `_. + To fail if the destination already exists, use a conditional request with If-None-Match: "*". + + :param rename_source: The file or directory to be renamed. The value must have the following + format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties will + overwrite the existing properties; otherwise, the existing properties will be preserved. + :type rename_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param marker: When renaming a directory, the number of paths that are renamed with each + invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation + token is returned in this response header. When a continuation token is returned in the + response, it must be specified in a subsequent invocation of the rename operation to continue + renaming the directory. + :type marker: str + :param path_rename_mode: Determines the behavior of the rename operation. + :type path_rename_mode: str or ~azure.storage.blob.models.PathRenameMode + :param directory_properties: Optional. User-defined properties to be stored with the file or + directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", + where each value is base64 encoded. + :type directory_properties: str + :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the + account. Sets POSIX access permissions for the file owner, the file owning group, and others. + Each class may be granted read, write, or execute permission. The sticky bit is also + supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. + :type posix_permissions: str + :param posix_umask: Only valid if Hierarchical Namespace is enabled for the account. This umask + restricts permission settings for file and directory, and will only be applied when default Acl + does not exist in parent directory. If the umask bit has set, it means that the corresponding + permission will be disabled. Otherwise the corresponding permission will be determined by the + permission. A 4-digit octal notation (e.g. 0022) is supported here. If no umask was specified, + a default umask - 0027 will be used. + :type posix_umask: str + :param source_lease_id: A lease ID for the source path.
If specified, the source path must have + an active lease and the lease ID must match. + :type source_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param directory_http_headers: Parameter group. + :type directory_http_headers: ~azure.storage.blob.models.DirectoryHttpHeaders + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. + :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _cache_control = None + _content_type = None + _content_encoding = None + _content_language = None + _content_disposition = None + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if directory_http_headers is not None: + _cache_control = directory_http_headers.cache_control + _content_type = directory_http_headers.content_type + _content_encoding = directory_http_headers.content_encoding + _content_language = directory_http_headers.content_language + _content_disposition = directory_http_headers.content_disposition + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + accept = "application/xml" + + # Construct URL + url = self.rename.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if marker is not None: + query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') + if path_rename_mode is not None: + query_parameters['mode'] = self._serialize.query("path_rename_mode", path_rename_mode, 'str') + + # Construct 
headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str') + if directory_properties is not None: + header_parameters['x-ms-properties'] = self._serialize.header("directory_properties", directory_properties, 'str') + if posix_permissions is not None: + header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') + if posix_umask is not None: + header_parameters['x-ms-umask'] = self._serialize.header("posix_umask", posix_umask, 'str') + if _cache_control is not None: + header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str') + if _content_type is not None: + header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str') + if _content_encoding is not None: + header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str') + if _content_language is not None: + header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str') + if _content_disposition is not None: + header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str') + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if source_lease_id is not None: + header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _source_if_modified_since is not None: + header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123') + if _source_if_unmodified_since is not None: + header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123') + if _source_if_match is not None: + header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str') + if _source_if_none_match is not None: + header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + rename.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + + def delete( + self, + recursive_directory_delete, # type: bool + timeout=None, # type: Optional[int] + marker=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the directory. + + :param recursive_directory_delete: If "true", all paths beneath the directory will be deleted. + If "false" and the directory is non-empty, an error occurs. + :type recursive_directory_delete: bool + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param marker: When deleting a directory, the number of paths that are deleted with each + invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation + token is returned in this response header. When a continuation token is returned in the + response, it must be specified in a subsequent invocation of the delete operation to continue + deleting the directory. + :type marker: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group.
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + accept = "application/xml" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + query_parameters['recursive'] = self._serialize.query("recursive_directory_delete", recursive_directory_delete, 'bool') + if marker is not None: + query_parameters['continuation'] = self._serialize.query("marker", marker, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation')) + response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + + def set_access_control( + self, + timeout=None, # type: Optional[int] + owner=None, # type: Optional[str] + group=None, # type: Optional[str] + posix_permissions=None, # type: Optional[str] + posix_acl=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Set the owner, group, permissions, or access control list for a directory. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param owner: Optional. The owner of the blob or directory. + :type owner: str + :param group: Optional. The owning group of the blob or directory. + :type group: str + :param posix_permissions: Optional and only valid if Hierarchical Namespace is enabled for the + account. Sets POSIX access permissions for the file owner, the file owning group, and others. + Each class may be granted read, write, or execute permission. The sticky bit is also + supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. + :type posix_permissions: str + :param posix_acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". + :type posix_acl: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_match = None + _if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + action = "setAccessControl" + accept = "application/xml" + + # Construct URL + url = self.set_access_control.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['action'] = self._serialize.query("action", action, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if owner is not None: + header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str') + if group is not None: + header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str') + if posix_permissions is not None: + header_parameters['x-ms-permissions'] = self._serialize.header("posix_permissions", posix_permissions, 'str') + if posix_acl is not None: + header_parameters['x-ms-acl'] = self._serialize.header("posix_acl", posix_acl, 'str') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.patch(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore + + def get_access_control( + self, + timeout=None, # type: Optional[int] + upn=None, # type: Optional[bool] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Get the owner, group, permissions, or access control list for a directory. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. + :type timeout: int + :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If + "true", the identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response + headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If + "false", the values will be returned as Azure Active Directory Object IDs. The default value is + false. + :type upn: bool + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_match = None + _if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + action = "getAccessControl" + accept = "application/xml" + + # Construct URL + url = self.get_access_control.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['action'] = self._serialize.query("action", action, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if upn is not None: + query_parameters['upn'] = self._serialize.query("upn", upn, 'bool') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if _lease_id is not None: + header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str') + if _if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str') + if _if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str') + if _if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123') + if _if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.head(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataLakeStorageError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + 
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner')) + response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group')) + response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions')) + response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + get_access_control.metadata = {'url': '/{filesystem}/{path}'} # type: ignore diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py index 63628418ddd5..2c78b69a81cb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py @@ -562,6 +562,7 @@ def submit_batch( content_type = kwargs.pop("content_type", "application/xml") accept = "application/xml" + multipart_content_type = kwargs.pop("content_type", None) # Construct URL url = self.submit_batch.metadata['url'] # type: ignore path_format_arguments = { @@ -586,7 +587,7 @@ def submit_batch( header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(body, 'IO', is_xml=True) + body_content = self._serialize.body(body, 'IO') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py index 724e1892305c..e13b085e6cb5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py @@ -920,8 +920,6 @@ class ContainerSasPermissions(object): :keyword bool set_immutability_policy: To enable operations related to set/delete immutability policy. To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. 
""" def __init__(self, read=False, write=False, delete=False, list=False, delete_previous_version=False, tag=False, **kwargs): # pylint: disable=redefined-builtin @@ -930,14 +928,12 @@ def __init__(self, read=False, write=False, delete=False, self.delete = delete self.list = list self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.tag = tag self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('t' if self.tag else '') + ('i' if self.set_immutability_policy else '')) @@ -963,12 +959,11 @@ def from_string(cls, permission): p_delete = 'd' in permission p_list = 'l' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_tag = 't' in permission p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, delete_previous_version=p_delete_previous_version, tag=p_tag, - set_immutability_policy=p_set_immutability_policy, permanent_delete=p_permanent_delete) + set_immutability_policy=p_set_immutability_policy) return parsed @@ -997,8 +992,6 @@ class BlobSasPermissions(object): :keyword bool set_immutability_policy: To enable operations related to set/delete immutability policy. To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. """ def __init__(self, read=False, add=False, create=False, write=False, delete=False, delete_previous_version=False, tag=True, **kwargs): @@ -1008,7 +1001,6 @@ def __init__(self, read=False, add=False, create=False, write=False, self.write = write self.delete = delete self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.tag = tag self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + @@ -1017,7 +1009,6 @@ def __init__(self, read=False, add=False, create=False, write=False, ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('t' if self.tag else '') + ('i' if self.set_immutability_policy else '')) @@ -1043,13 +1034,12 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_tag = 't' in permission p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, tag=p_tag, - set_immutability_policy=p_set_immutability_policy, permanent_delete=p_permanent_delete) + set_immutability_policy=p_set_immutability_policy) return parsed diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index 84ed536f3546..d44c5ade481b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -39,8 +39,7 @@ '2020-04-08', '2020-06-12', '2020-08-04', - '2020-10-02', - '2020-12-06' + '2020-10-02' ] diff --git 
a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py index 22e7b7522ae9..6f6052a642f3 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/models.py @@ -331,9 +331,6 @@ class AccountSasPermissions(object): :keyword bool set_immutability_policy: To enable operations related to set/delete immutability policy. To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - Valid for Object resource type of Blob only. """ def __init__(self, read=False, write=False, delete=False, list=False, # pylint: disable=redefined-builtin @@ -342,7 +339,6 @@ def __init__(self, read=False, write=False, delete=False, self.write = write self.delete = delete self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -355,7 +351,6 @@ def __init__(self, read=False, write=False, delete=False, ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + @@ -386,7 +381,6 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -397,8 +391,7 @@ def from_string(cls, permission): p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, - permanent_delete=p_permanent_delete) + filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy) return parsed diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py index d2ebfc4b8095..07aad5ffa1c8 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/shared_access_signature.py @@ -39,9 +39,6 @@ class QueryStringConstants(object): SIGNED_KEY_SERVICE = 'sks' SIGNED_KEY_VERSION = 'skv' - # for blob only - SIGNED_ENCRYPTION_SCOPE = 'ses' - # for ADLS SIGNED_AUTHORIZED_OID = 'saoid' SIGNED_UNAUTHORIZED_OID = 'suoid' @@ -77,8 +74,6 @@ def to_list(): QueryStringConstants.SIGNED_KEY_EXPIRY, QueryStringConstants.SIGNED_KEY_SERVICE, QueryStringConstants.SIGNED_KEY_VERSION, - # for blob only - QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, # for ADLS QueryStringConstants.SIGNED_AUTHORIZED_OID, QueryStringConstants.SIGNED_UNAUTHORIZED_OID, @@ -109,7 +104,7 @@ def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION): self.x_ms_version = x_ms_version def generate_account(self, services, resource_types, permission, expiry, start=None, - ip=None, protocol=None, **kwargs): + ip=None, protocol=None): ''' Generates a shared access signature for the account. 
Use the returned signature with the sas_token parameter of the service @@ -154,7 +149,6 @@ def generate_account(self, services, resource_types, permission, expiry, start=N sas = _SharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) sas.add_account(services, resource_types) - sas.add_encryption_scope(**kwargs) sas.add_account_signature(self.account_name, self.account_key) return sas.get_token() @@ -168,9 +162,6 @@ def _add_query(self, name, val): if val: self.query_dict[name] = _str(val) if val is not None else None - def add_encryption_scope(self, **kwargs): - self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) - def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): if isinstance(start, date): start = _to_utc_datetime(start) @@ -220,8 +211,7 @@ def get_value_to_append(query): get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + get_value_to_append(QueryStringConstants.SIGNED_IP) + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + - get_value_to_append(QueryStringConstants.SIGNED_VERSION) + - get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) + get_value_to_append(QueryStringConstants.SIGNED_VERSION)) self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key, string_to_sign)) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py index c2bbbe0b7776..890ef1b2eeac 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py @@ -10,7 +10,7 @@ from ._shared import sign_string, url_quote from ._shared.constants import X_MS_VERSION -from ._shared.models import Services, UserDelegationKey +from ._shared.models import Services from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \ QueryStringConstants @@ -19,6 +19,7 @@ from ..blob import ( ResourceTypes, AccountSasPermissions, + UserDelegationKey, ContainerSasPermissions, BlobSasPermissions ) @@ -132,7 +133,6 @@ def generate_blob(self, container_name, blob_name, snapshot=None, version_id=Non sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) - sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, resource_path, user_delegation_key=self.user_delegation_key) @@ -208,7 +208,6 @@ def generate_container(self, container_name, permission=None, expiry=None, sas.add_override_response_headers(cache_control, content_disposition, content_encoding, content_language, content_type) - sas.add_encryption_scope(**kwargs) sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature(self.account_name, self.account_key, container_name, user_delegation_key=self.user_delegation_key) @@ -272,7 +271,6 @@ def add_resource_signature(self, account_name, account_key, path, user_delegatio self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + - self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + 
self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + @@ -347,8 +345,6 @@ def generate_account_sas( restricts the request to those IP addresses. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str @@ -453,8 +449,6 @@ def generate_container_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str @@ -469,8 +463,7 @@ def generate_container_sas( """ if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") - if isinstance(account_key, UserDelegationKey): - user_delegation_key = account_key + if user_delegation_key: sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) else: @@ -577,15 +570,11 @@ def generate_blob_sas( :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. - :keyword str encryption_scope: - Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str """ if not user_delegation_key and not account_key: raise ValueError("Either user_delegation_key or account_key must be provided.") - if isinstance(account_key, UserDelegationKey): - user_delegation_key = account_key version_id = kwargs.pop('version_id', None) if version_id and snapshot: raise ValueError("snapshot and version_id cannot be set at the same time.") diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index e36433460865..5ecaaff43a09 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -1275,17 +1275,6 @@ async def start_copy_from_url(self, source_url, metadata=None, incremental_copy= Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. This option is only available when `incremental_copy` is set to False and `requires_sync` is set to True. - - .. versionadded:: 12.9.0 - - :keyword str encryption_scope: - A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption - scope can be created using the Management API and referenced here by name. If a default - encryption scope has been defined at the container, this value will override it if the - container-level scope is configured to allow overrides. Otherwise an error will be raised. - - .. versionadded:: 12.10.0 - :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). 
:rtype: dict[str, Union[str, ~datetime.datetime]] diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md index 52148bcc5dba..330862cb09e8 100644 --- a/sdk/storage/azure-storage-blob/swagger/README.md +++ b/sdk/storage/azure-storage-blob/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/storage-dataplane-preview/specification/storage/data-plane/Microsoft.BlobStorage/preview/2020-12-06/blob.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/storage-dataplane-preview/specification/storage/data-plane/Microsoft.BlobStorage/preview/2020-10-02/blob.json output-folder: ../azure/storage/blob/_generated namespace: azure.storage.blob no-namespace-folders: true diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob.py b/sdk/storage/azure-storage-blob/tests/test_common_blob.py index 4359d3ffdf9c..1872a620754d 100644 --- a/sdk/storage/azure-storage-blob/tests/test_common_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob.py @@ -1706,16 +1706,13 @@ def test_sas_access_blob_snapshot(self, storage_account_name, storage_account_ke blob_snapshot = blob_client.create_snapshot() blob_snapshot_client = self.bsc.get_blob_client(self.container_name, blob_name, snapshot=blob_snapshot) - permission = BlobSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - permanent_delete=True, list=True, add=True, create=True, update=True) - self.assertIn('y', str(permission)) token = generate_blob_sas( blob_snapshot_client.account_name, blob_snapshot_client.container_name, blob_snapshot_client.blob_name, snapshot=blob_snapshot_client.snapshot, account_key=blob_snapshot_client.credential.account_key, - permission=permission, + permission=BlobSasPermissions(read=True, delete=True), expiry=datetime.utcnow() + timedelta(hours=1), ) @@ -1889,15 +1886,11 @@ def test_account_sas_credential(self, storage_account_name, storage_account_key) self._setup(storage_account_name, storage_account_key) blob_name = self._create_block_blob() - account_sas_permission = AccountSasPermissions(read=True, write=True, delete=True, add=True, - permanent_delete=True, list=True) - self.assertIn('y', str(account_sas_permission)) - token = generate_account_sas( self.bsc.account_name, self.bsc.credential.account_key, ResourceTypes(container=True, object=True), - account_sas_permission, + AccountSasPermissions(read=True), datetime.utcnow() + timedelta(hours=1), ) @@ -1965,6 +1958,7 @@ def test_user_delegation_sas_for_blob(self, storage_account_name, storage_accoun blob_client.container_name, blob_client.blob_name, snapshot=blob_client.snapshot, + account_key=storage_account_key, permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), user_delegation_key=user_delegation_key, @@ -2192,15 +2186,11 @@ def test_get_account_information_with_container_sas(self, storage_account_name, self._setup(storage_account_name, storage_account_key) container = self.bsc.get_container_client(self.container_name) - permission = ContainerSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - list=True, tag=True, set_immutability_policy=True, - permanent_delete=True) - self.assertIn('y', str(permission)) token = generate_container_sas( container.account_name, container.container_name, account_key=container.credential.account_key, - permission=permission, + 
permission=ContainerSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) sas_container = ContainerClient.from_container_url(container.url, credential=token) diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py index 431218806fc4..a3b5fcefd3dc 100644 --- a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py @@ -1873,17 +1873,13 @@ async def test_sas_access_blob(self, storage_account_name, storage_account_key): blob_name = await self._create_block_blob() blob = self.bsc.get_blob_client(self.container_name, blob_name) - permission = BlobSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - permanent_delete=True, list=True, add=True, create=True, update=True) - self.assertIn('y', str(permission)) - token = generate_blob_sas( blob.account_name, blob.container_name, blob.blob_name, snapshot=blob.snapshot, account_key=blob.credential.account_key, - permission=permission, + permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) @@ -1941,15 +1937,11 @@ async def test_account_sas(self, storage_account_name, storage_account_key): await self._setup(storage_account_name, storage_account_key) blob_name = await self._create_block_blob() - account_sas_permission = AccountSasPermissions(read=True, write=True, delete=True, add=True, - permanent_delete=True, list=True) - self.assertIn('y', str(account_sas_permission)) - token = generate_account_sas( self.bsc.account_name, self.bsc.credential.account_key, ResourceTypes(container=True, object=True), - account_sas_permission, + AccountSasPermissions(read=True), datetime.utcnow() + timedelta(hours=1), ) @@ -2225,15 +2217,11 @@ async def test_get_account_information_with_container_sas(self, storage_account_ # Arrange await self._setup(storage_account_name, storage_account_key) container = self.bsc.get_container_client(self.container_name) - permission = ContainerSasPermissions(read=True, write=True, delete=True, delete_previous_version=True, - list=True, tag=True, set_immutability_policy=True, - permanent_delete=True) - self.assertIn('y', str(permission)) token = generate_container_sas( container.account_name, container.container_name, account_key=container.credential.account_key, - permission=permission, + permission=ContainerSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) sas_container = ContainerClient.from_container_url(container.url, credential=token) diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_n.py b/sdk/storage/azure-storage-blob/tests/test_cpk_n.py index d7bdd9b5f6f8..28013f6496a6 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_n.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_n.py @@ -15,8 +15,7 @@ BlobBlock, BlobSasPermissions, ContainerEncryptionScope, - generate_blob_sas, - generate_account_sas, ResourceTypes, AccountSasPermissions, generate_container_sas, ContainerSasPermissions + generate_blob_sas ) from settings.testcase import BlobPreparer from devtools_testutils.storage import StorageTestCase @@ -31,8 +30,6 @@ "default_encryption_scope": "containerscope", "prevent_encryption_scope_override": True } -TEST_SAS_ENCRYPTION_SCOPE = "testscope1" -TEST_SAS_ENCRYPTION_SCOPE_2 = "testscope2" # ------------------------------------------------------------------------------ @@ -45,10 +42,7 @@ def _setup(self, bsc): self.byte_data = 
self.get_random_bytes(64 * 1024) if self.is_live: - try: - bsc.create_container(self.container_name) - except: - pass + bsc.create_container(self.container_name) def _teardown(self, bsc): if self.is_live: @@ -64,11 +58,11 @@ def _teardown(self, bsc): def _get_blob_reference(self): return self.get_resource_name("cpk") - def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False): + def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) data = data if data else b'' - resp = blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite) + resp = blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency) return blob_client, resp def _create_append_blob(self, bsc, encryption_scope=None): @@ -128,131 +122,6 @@ def test_put_block_and_put_block_list(self, storage_account_name, storage_accoun self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE) self._teardown(bsc) - @pytest.mark.live_test_only - @BlobPreparer() - def test_put_block_and_put_block_list_with_blob_sas(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - self._setup(bsc) - - blob_name = self._get_blob_reference() - token1 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE, - ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) - - blob_client.stage_block('1', b'AAA') - blob_client.stage_block('2', b'BBB') - blob_client.stage_block('3', b'CCC') - - # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - put_block_list_resp = blob_client.commit_block_list(block_list) - - # Assert - self.assertIsNotNone(put_block_list_resp['etag']) - self.assertIsNotNone(put_block_list_resp['last_modified']) - self.assertTrue(put_block_list_resp['request_server_encrypted']) - self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE) - - # Act get the blob content - blob = blob_client.download_blob() - - # Assert content was retrieved with the cpk - self.assertEqual(blob.readall(), b'AAABBBCCC') - self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) - self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) - self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - def test_put_block_and_put_block_list_with_blob_sas_fails(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - 
max_page_size=1024) - self._setup(bsc) - - blob_name = self._get_blob_reference() - token1 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE, - ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) - - # If ses in the SAS token and the encryption_scope keyword are both set but have DIFFERENT values, the request throws an exception - with self.assertRaises(HttpResponseError): - blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE) - - # If ses in the SAS token and the encryption_scope keyword are both set and have the SAME value, the request succeeds - blob_client.stage_block('1', b'AAA', encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - # Act - block_list = [BlobBlock(block_id='1')] - # If ses in the SAS token and the encryption_scope keyword are both set but have DIFFERENT values, the request throws an exception - with self.assertRaises(HttpResponseError): - blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE) - - # If ses in the SAS token and the encryption_scope keyword are both set and have the SAME value, the request succeeds - put_block_list_resp = blob_client.commit_block_list(block_list, encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - # Assert - self.assertIsNotNone(put_block_list_resp['etag']) - self.assertIsNotNone(put_block_list_resp['last_modified']) - self.assertTrue(put_block_list_resp['request_server_encrypted']) - self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE) - - # generate a sas with a different encryption scope - token2 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_KEY_SCOPE, - ) - blob_client_diff_encryption_scope_sas = BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\ - .get_blob_client(self.container_name, blob_name) - - # The blob can be downloaded successfully no matter which encryption scope is actually on the blob: - # the encryption scope on the blob is TEST_SAS_ENCRYPTION_SCOPE while ses in the SAS token is TEST_ENCRYPTION_KEY_SCOPE, - # yet the download still succeeds - blob = blob_client_diff_encryption_scope_sas.download_blob() - - # Assert content was retrieved with the cpk - self.assertEqual(blob.readall(), b'AAA') - self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) - self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) - self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - self._teardown(bsc) - @pytest.mark.live_test_only @pytest.mark.playback_test_only @BlobPreparer() @@ -778,218 +647,6 @@ def test_list_blobs(self, storage_account_name, storage_account_key): self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - def test_list_blobs_using_container_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - self._setup(bsc) - - token = 
generate_container_sas( - storage_account_name, - self.container_name, - storage_account_key, - permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE - blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - self._create_append_blob(bsc_with_sas_credential) - - # generate a token with TEST_ENCRYPTION_KEY_SCOPE - token2 = generate_container_sas( - storage_account_name, - self.container_name, - storage_account_key, - permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_KEY_SCOPE - ) - bsc_with_diff_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=token2, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - container_client = bsc_with_diff_sas_credential.get_container_client(self.container_name) - - # The ses field in the SAS token used for listing differs from the encryption scope used when creating the blob, - # but listing blobs should still succeed - generator = container_client.list_blobs(include="metadata") - for blob in generator: - self.assertIsNotNone(blob) - # Assert: every listed blob has encryption_scope - # and the encryption scope is the same as the one on blob creation - self.assertEqual(blob.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - def test_copy_with_account_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - sas_token = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE_2 - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - self._setup(bsc_with_sas_credential) - # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE_2 - blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - - # - sas_token2 = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_account_key_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token2, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - 
max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') - copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob) - - # TODO: confirm with Sean/Heidi that ses in SAS cannot be set for async copy. - # The test failed for async copy (without requires_sync=True) - copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True) - - props = copied_blob_client.get_blob_properties() - - self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc_with_sas_credential) - - @pytest.mark.live_test_only - @BlobPreparer() - def test_copy_blob_from_url_with_ecryption_scope(self, storage_account_name, storage_account_key): - # Arrange - - # create sas for source blob - sas_token = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - self._setup(bsc_with_sas_credential) - blob_client, _ = self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') - copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob) - - copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True, - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - props = copied_blob_client.get_blob_properties() - - self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc_with_sas_credential) - - @pytest.mark.live_test_only - @BlobPreparer() - def test_copy_with_user_delegation_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - # to get user delegation key - oauth_token_credential = self.generate_oauth_token() - service_client = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=oauth_token_credential, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - user_delegation_key = service_client.get_user_delegation_key(datetime.utcnow(), - datetime.utcnow() + timedelta(hours=1)) - - self._setup(service_client) - - blob_name = self.get_resource_name('blob') - - sas_token = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=user_delegation_key, - permission=BlobSasPermissions(read=True, write=True, create=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_delegation_sas = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - # blob is encrypted using 
TEST_SAS_ENCRYPTION_SCOPE - blob_client, _ = self._create_block_blob(bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True) - props = blob_client.get_blob_properties() - - self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(service_client) - @pytest.mark.playback_test_only @BlobPreparer() def test_create_container_with_default_cpk_n(self, storage_account_name, storage_account_key): diff --git a/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py b/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py index 42de916d8dc5..678f154e175b 100644 --- a/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py @@ -12,8 +12,7 @@ from azure.core.exceptions import HttpResponseError from azure.core.pipeline.transport import AioHttpTransport from multidict import CIMultiDict, CIMultiDictProxy -from azure.storage.blob import BlobType, BlobBlock, BlobSasPermissions, generate_blob_sas, ContainerEncryptionScope, \ - generate_container_sas, ContainerSasPermissions, generate_account_sas, ResourceTypes, AccountSasPermissions +from azure.storage.blob import BlobType, BlobBlock, BlobSasPermissions, generate_blob_sas, ContainerEncryptionScope from azure.storage.blob.aio import BlobServiceClient from settings.testcase import BlobPreparer from devtools_testutils.storage.aio import AsyncStorageTestCase @@ -28,8 +27,6 @@ "default_encryption_scope": "containerscope", "prevent_encryption_scope_override": True } -TEST_SAS_ENCRYPTION_SCOPE = "testscope1" -TEST_SAS_ENCRYPTION_SCOPE_2 = "testscope2" # ------------------------------------------------------------------------------ @@ -51,10 +48,7 @@ async def _setup(self, bsc): self.byte_data = self.get_random_bytes(64 * 1024) self.container_name = self.get_resource_name('utcontainer') if self.is_live: - try: - await bsc.create_container(self.container_name) - except: - pass + await bsc.create_container(self.container_name) def _teardown(self, bsc): @@ -72,11 +66,11 @@ def _teardown(self, bsc): def _get_blob_reference(self): return self.get_resource_name("cpk") - async def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False): + async def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = bsc.get_blob_client(self.container_name, blob_name) data = data if data else b'' - resp = await blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite) + resp = await blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency) return blob_client, resp async def _create_append_blob(self, bsc, encryption_scope=None): @@ -137,133 +131,6 @@ async def test_put_block_and_put_block_list(self, storage_account_name, storage_ self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE) - @pytest.mark.live_test_only - @BlobPreparer() - async def test_put_block_and_put_block_list_with_blob_sas(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - 
max_block_size=1024, - max_page_size=1024) - await self._setup(bsc) - - blob_name = self._get_blob_reference() - token1 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE, - ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) - - await blob_client.stage_block('1', b'AAA') - await blob_client.stage_block('2', b'BBB') - await blob_client.stage_block('3', b'CCC') - - # Act - block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] - put_block_list_resp = await blob_client.commit_block_list(block_list) - - # Assert - self.assertIsNotNone(put_block_list_resp['etag']) - self.assertIsNotNone(put_block_list_resp['last_modified']) - self.assertTrue(put_block_list_resp['request_server_encrypted']) - self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE) - - # Act get the blob content - blob = await blob_client.download_blob() - content = await blob.readall() - - # Assert content was retrieved with the cpk - self.assertEqual(content, b'AAABBBCCC') - self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) - self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) - self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - async def test_put_block_and_put_block_list_with_blob_sas_fails(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - await self._setup(bsc) - - blob_name = self._get_blob_reference() - token1 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE, - ) - blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\ - .get_blob_client(self.container_name, blob_name) - - # If ses in the SAS token and the encryption_scope keyword are both set but have DIFFERENT values, the request throws an exception - with self.assertRaises(HttpResponseError): - await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE) - - # If ses in the SAS token and the encryption_scope keyword are both set and have the SAME value, the request succeeds - await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - # Act - block_list = [BlobBlock(block_id='1')] - # If ses in the SAS token and the encryption_scope keyword are both set but have DIFFERENT values, the request throws an exception - with self.assertRaises(HttpResponseError): - await blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE) - - # If ses in the SAS token and the encryption_scope keyword are both set and have the SAME value, the request succeeds - put_block_list_resp = await blob_client.commit_block_list(block_list, encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - # Assert - self.assertIsNotNone(put_block_list_resp['etag']) - 
self.assertIsNotNone(put_block_list_resp['last_modified']) - self.assertTrue(put_block_list_resp['request_server_encrypted']) - self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE) - - # generate a sas with a different encryption scope - token2 = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=storage_account_key, - permission=BlobSasPermissions(read=True, write=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_KEY_SCOPE, - ) - blob_client_diff_encryption_scope_sas = BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\ - .get_blob_client(self.container_name, blob_name) - - # The blob can be downloaded successfully no matter which encryption scope is actually on the blob: - # the encryption scope on the blob is TEST_SAS_ENCRYPTION_SCOPE while ses in the SAS token is TEST_ENCRYPTION_KEY_SCOPE, - # yet the download still succeeds - blob = await blob_client_diff_encryption_scope_sas.download_blob() - content = await blob.readall() - - # Assert content was retrieved with the cpk - self.assertEqual(content, b'AAA') - self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) - self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) - self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - self._teardown(bsc) - @pytest.mark.live_test_only @pytest.mark.playback_test_only @BlobPreparer() @@ -802,218 +669,6 @@ async def test_list_blobs(self, storage_account_name, storage_account_key): self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - async def test_list_blobs_using_container_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - await self._setup(bsc) - - token = generate_container_sas( - storage_account_name, - self.container_name, - storage_account_key, - permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE - blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - await self._create_append_blob(bsc_with_sas_credential) - - # generate a token with TEST_ENCRYPTION_KEY_SCOPE - token2 = generate_container_sas( - storage_account_name, - self.container_name, - storage_account_key, - permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_ENCRYPTION_KEY_SCOPE - ) - bsc_with_diff_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=token2, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - container_client = 
bsc_with_diff_sas_credential.get_container_client(self.container_name) - - # The ses field in the SAS token used for listing differs from the encryption scope used when creating the blob, - # but listing blobs should still succeed - generator = container_client.list_blobs(include="metadata") - async for blob in generator: - self.assertIsNotNone(blob) - # Assert: every listed blob has encryption_scope - # and the encryption scope is the same as the one on blob creation - self.assertEqual(blob.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc) - - @pytest.mark.live_test_only - @BlobPreparer() - async def test_copy_with_account_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - sas_token = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE_2 - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - await self._setup(bsc_with_sas_credential) - # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE_2 - blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - - # - sas_token2 = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_account_key_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token2, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') - copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob) - - # TODO: confirm with Sean/Heidi that ses in SAS cannot be set for async copy. 
- # The test failed for async copy (without requires_sync=True) - await copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True) - - props = await copied_blob_client.get_blob_properties() - - self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc_with_sas_credential) - - @pytest.mark.live_test_only - @BlobPreparer() - async def test_copy_blob_from_url_with_ecryption_scope(self, storage_account_name, storage_account_key): - # Arrange - - # create sas for source blob - sas_token = generate_account_sas( - storage_account_name, - account_key=storage_account_key, - resource_types=ResourceTypes(object=True, container=True), - permission=AccountSasPermissions(read=True, write=True, delete=True, list=True), - expiry=datetime.utcnow() + timedelta(hours=1), - ) - bsc_with_sas_credential = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - await self._setup(bsc_with_sas_credential) - blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True) - - bsc = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=storage_account_key, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - copied_blob = self.get_resource_name('copiedblob') - copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob) - - await copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True, - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE) - - props = await copied_blob_client.get_blob_properties() - - self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(bsc_with_sas_credential) - - @pytest.mark.live_test_only - @BlobPreparer() - async def test_copy_with_user_delegation_encryption_scope_sas(self, storage_account_name, storage_account_key): - # Arrange - # to get user delegation key - oauth_token_credential = self.generate_oauth_token() - service_client = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=oauth_token_credential, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(), - datetime.utcnow() + timedelta(hours=1)) - - await self._setup(service_client) - - blob_name = self.get_resource_name('blob') - - sas_token = generate_blob_sas( - storage_account_name, - self.container_name, - blob_name, - account_key=user_delegation_key, - permission=BlobSasPermissions(read=True, write=True, create=True, delete=True), - expiry=datetime.utcnow() + timedelta(hours=1), - encryption_scope=TEST_SAS_ENCRYPTION_SCOPE - ) - bsc_with_delegation_sas = BlobServiceClient( - self.account_url(storage_account_name, "blob"), - credential=sas_token, - connection_data_block_size=1024, - max_single_put_size=1024, - min_large_block_upload_threshold=1024, - max_block_size=1024, - max_page_size=1024) - - # blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE - blob_client, _ = await self._create_block_blob(bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True) - props = await blob_client.get_blob_properties() - - 
self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE) - - self._teardown(service_client) - @pytest.mark.playback_test_only @BlobPreparer() @AsyncStorageTestCase.await_prepared_test diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py index 3d46f71da600..0aeb96a8aea7 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/models.py @@ -329,12 +329,6 @@ class AccountSasPermissions(object): To enable set or get tags on the blobs in the container. :keyword bool filter_by_tags: To enable get blobs by tags, this should be used together with list permission. - :keyword bool set_immutability_policy: - To enable operations related to set/delete immutability policy. - To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - Valid for Object resource type of Blob only. """ def __init__(self, read=False, write=False, delete=False, list=False, # pylint: disable=redefined-builtin @@ -343,7 +337,6 @@ def __init__(self, read=False, write=False, delete=False, self.write = write self.delete = delete self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -351,20 +344,17 @@ def __init__(self, read=False, write=False, delete=False, self.process = process self.tag = kwargs.pop('tag', False) self.filter_by_tags = kwargs.pop('filter_by_tags', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('u' if self.update else '') + ('p' if self.process else '') + ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + - ('i' if self.set_immutability_policy else '') + ('t' if self.tag else '') ) def __str__(self): @@ -387,7 +377,6 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -395,11 +384,9 @@ def from_string(cls, permission): p_process = 'p' in permission p_tag = 't' in permission p_filter_by_tags = 'f' in permission - p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, - permanent_delete=p_permanent_delete) + filter_by_tags=p_filter_by_tags) return parsed diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py index 355f569cd79e..27cd2360f81c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/models.py @@ 
-329,12 +329,6 @@ class AccountSasPermissions(object): To enable set or get tags on the blobs in the container. :keyword bool filter_by_tags: To enable get blobs by tags, this should be used together with list permission. - :keyword bool set_immutability_policy: - To enable operations related to set/delete immutability policy. - To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - Valid for Object resource type of Blob only. """ def __init__(self, read=False, write=False, delete=False, list=False, # pylint: disable=redefined-builtin @@ -343,7 +337,6 @@ def __init__(self, read=False, write=False, delete=False, self.write = write self.delete = delete self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -351,20 +344,17 @@ def __init__(self, read=False, write=False, delete=False, self.process = process self.tag = kwargs.pop('tag', False) self.filter_by_tags = kwargs.pop('filter_by_tags', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('u' if self.update else '') + ('p' if self.process else '') + ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + - ('i' if self.set_immutability_policy else '') + ('t' if self.tag else '') ) def __str__(self): @@ -387,7 +377,6 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -395,11 +384,9 @@ def from_string(cls, permission): p_process = 'p' in permission p_tag = 't' in permission p_filter_by_tags = 'f' in permission - p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, - permanent_delete=p_permanent_delete) + filter_by_tags=p_filter_by_tags) return parsed diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py index 761e3de2ba7c..4a8d4a3dc513 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py @@ -329,12 +329,6 @@ class AccountSasPermissions(object): To enable set or get tags on the blobs in the container. :keyword bool filter_by_tags: To enable get blobs by tags, this should be used together with list permission. - :keyword bool set_immutability_policy: - To enable operations related to set/delete immutability policy. - To get immutability policy, you just need read permission. - :keyword bool permanent_delete: - To enable permanent delete on the blob is permitted. - Valid for Object resource type of Blob only. 
""" def __init__(self, read=False, write=False, delete=False, list=False, # pylint: disable=redefined-builtin @@ -343,7 +337,6 @@ def __init__(self, read=False, write=False, delete=False, self.write = write self.delete = delete self.delete_previous_version = delete_previous_version - self.permanent_delete = kwargs.pop('permanent_delete', False) self.list = list self.add = add self.create = create @@ -351,20 +344,17 @@ def __init__(self, read=False, write=False, delete=False, self.process = process self.tag = kwargs.pop('tag', False) self.filter_by_tags = kwargs.pop('filter_by_tags', False) - self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) self._str = (('r' if self.read else '') + ('w' if self.write else '') + ('d' if self.delete else '') + ('x' if self.delete_previous_version else '') + - ('y' if self.permanent_delete else '') + ('l' if self.list else '') + ('a' if self.add else '') + ('c' if self.create else '') + ('u' if self.update else '') + ('p' if self.process else '') + ('f' if self.filter_by_tags else '') + - ('t' if self.tag else '') + - ('i' if self.set_immutability_policy else '') + ('t' if self.tag else '') ) def __str__(self): @@ -387,7 +377,6 @@ def from_string(cls, permission): p_write = 'w' in permission p_delete = 'd' in permission p_delete_previous_version = 'x' in permission - p_permanent_delete = 'y' in permission p_list = 'l' in permission p_add = 'a' in permission p_create = 'c' in permission @@ -395,11 +384,9 @@ def from_string(cls, permission): p_process = 'p' in permission p_tag = 't' in permission p_filter_by_tags = 'f' in permission - p_set_immutability_policy = 'i' in permission parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, - filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, - permanent_delete=p_permanent_delete) + filter_by_tags=p_filter_by_tags) return parsed